feat: integrated solana validator with yarn test

parent 12966235f2
commit 0fc3308710

@@ -1,4 +1,5 @@
/target
/node_modules
/test-ledger
/config
**/validator.log

@@ -15,7 +15,7 @@
   "scripts": {
     "test": "jest --detectOpenHandles",
     "start:lite-rpc": "lite-rpc",
-    "test:test-validator": "start-server-and-test 'target/debug/lite-rpc --port 9000 --subscription-port 9001 --rpc-url http://localhost:8899 --websocket-url ws://localhost:8900/' http://localhost:8899/health test"
+    "test:test-validator": "start-server-and-test './scripts/run.sh & target/debug/lite-rpc --port 9000 --subscription-port 9001 --rpc-url http://localhost:8899 --websocket-url ws://localhost:8900/' http://localhost:8899/health test"
   },
   "devDependencies": {
     "typescript": "^4.8.4"

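Note: start-server-and-test launches the quoted command, polls the given URL (here the validator's http://localhost:8899/health endpoint) until it responds, runs the "test" script, and then tears the command down. A rough shell equivalent of the new entry, assuming ./scripts/run.sh and the lite-rpc binary have already been built, would be:

  # launch the local validator plus lite-rpc in the background (cleanup omitted in this sketch)
  ./scripts/run.sh &
  target/debug/lite-rpc --port 9000 --subscription-port 9001 --rpc-url http://localhost:8899 --websocket-url ws://localhost:8900/ &
  # block until the validator's RPC health endpoint answers, then run the Jest suite
  until curl -sf http://localhost:8899/health >/dev/null; do sleep 1; done
  yarn test
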
@@ -0,0 +1,101 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Builds known downstream projects against local solana source
|
||||
#
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")"/..
|
||||
source ci/_
|
||||
source scripts/patch-crates.sh
|
||||
source scripts/read-cargo-variable.sh
|
||||
|
||||
solana_ver=$(readCargoVariable version sdk/Cargo.toml)
|
||||
solana_dir=$PWD
|
||||
cargo="$solana_dir"/cargo
|
||||
cargo_build_sbf="$solana_dir"/cargo-build-sbf
|
||||
cargo_test_sbf="$solana_dir"/cargo-test-sbf
|
||||
|
||||
mkdir -p target/downstream-projects-anchor
|
||||
cd target/downstream-projects-anchor
|
||||
|
||||
update_anchor_dependencies() {
|
||||
declare project_root="$1"
|
||||
declare anchor_ver="$2"
|
||||
declare tomls=()
|
||||
while IFS='' read -r line; do tomls+=("$line"); done < <(find "$project_root" -name Cargo.toml)
|
||||
|
||||
sed -i -e "s#\(anchor-lang = \"\)[^\"]*\(\"\)#\1=$anchor_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(anchor-spl = \"\)[^\"]*\(\"\)#\1=$anchor_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(anchor-lang = { version = \"\)[^\"]*\(\"\)#\1=$anchor_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(anchor-spl = { version = \"\)[^\"]*\(\"\)#\1=$anchor_ver\2#g" "${tomls[@]}" || return $?
|
||||
}
|
||||
|
||||
patch_crates_io_anchor() {
|
||||
declare Cargo_toml="$1"
|
||||
declare anchor_dir="$2"
|
||||
cat >> "$Cargo_toml" <<EOF
|
||||
anchor-lang = { path = "$anchor_dir/lang" }
|
||||
anchor-spl = { path = "$anchor_dir/spl" }
|
||||
EOF
|
||||
}
|
||||
|
||||
# NOTE This isn't run in a subshell to get $anchor_dir and $anchor_ver
|
||||
anchor() {
|
||||
set -x
|
||||
rm -rf anchor
|
||||
git clone https://github.com/coral-xyz/anchor.git
|
||||
cd anchor
|
||||
|
||||
update_solana_dependencies . "$solana_ver"
|
||||
patch_crates_io_solana Cargo.toml "$solana_dir"
|
||||
|
||||
$cargo build
|
||||
$cargo test
|
||||
|
||||
anchor_dir=$PWD
|
||||
anchor_ver=$(readCargoVariable version "$anchor_dir"/lang/Cargo.toml)
|
||||
|
||||
cd "$solana_dir"/target/downstream-projects-anchor
|
||||
}
|
||||
|
||||
mango() {
|
||||
(
|
||||
set -x
|
||||
rm -rf mango-v3
|
||||
git clone https://github.com/blockworks-foundation/mango-v3
|
||||
cd mango-v3
|
||||
|
||||
update_solana_dependencies . "$solana_ver"
|
||||
update_anchor_dependencies . "$anchor_ver"
|
||||
patch_crates_io_solana Cargo.toml "$solana_dir"
|
||||
patch_crates_io_anchor Cargo.toml "$anchor_dir"
|
||||
|
||||
$cargo build
|
||||
$cargo test
|
||||
$cargo_build_sbf
|
||||
$cargo_test_sbf
|
||||
)
|
||||
}
|
||||
|
||||
metaplex() {
|
||||
(
|
||||
set -x
|
||||
rm -rf metaplex-program-library
|
||||
git clone https://github.com/metaplex-foundation/metaplex-program-library
|
||||
cd metaplex-program-library
|
||||
|
||||
update_solana_dependencies . "$solana_ver"
|
||||
update_anchor_dependencies . "$anchor_ver"
|
||||
patch_crates_io_solana Cargo.toml "$solana_dir"
|
||||
patch_crates_io_anchor Cargo.toml "$anchor_dir"
|
||||
|
||||
$cargo build
|
||||
$cargo test
|
||||
$cargo_build_sbf
|
||||
$cargo_test_sbf
|
||||
)
|
||||
}
|
||||
|
||||
_ anchor
|
||||
#_ metaplex
|
||||
#_ mango
|
|
@@ -0,0 +1,116 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Builds known downstream projects against local solana source
|
||||
#
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")"/..
|
||||
source ci/_
|
||||
source ci/semver_bash/semver.sh
|
||||
source scripts/patch-crates.sh
|
||||
source scripts/read-cargo-variable.sh
|
||||
|
||||
solana_ver=$(readCargoVariable version sdk/Cargo.toml)
|
||||
solana_dir=$PWD
|
||||
cargo="$solana_dir"/cargo
|
||||
cargo_build_sbf="$solana_dir"/cargo-build-sbf
|
||||
cargo_test_sbf="$solana_dir"/cargo-test-sbf
|
||||
|
||||
mkdir -p target/downstream-projects
|
||||
cd target/downstream-projects
|
||||
|
||||
example_helloworld() {
|
||||
(
|
||||
set -x
|
||||
rm -rf example-helloworld
|
||||
git clone https://github.com/solana-labs/example-helloworld.git
|
||||
cd example-helloworld
|
||||
|
||||
update_solana_dependencies src/program-rust "$solana_ver"
|
||||
patch_crates_io_solana src/program-rust/Cargo.toml "$solana_dir"
|
||||
echo "[workspace]" >> src/program-rust/Cargo.toml
|
||||
|
||||
$cargo_build_sbf \
|
||||
--manifest-path src/program-rust/Cargo.toml
|
||||
|
||||
# TODO: Build src/program-c/...
|
||||
)
|
||||
}
|
||||
|
||||
spl() {
|
||||
(
|
||||
# Mind the order!
|
||||
PROGRAMS=(
|
||||
instruction-padding/program
|
||||
token/program
|
||||
token/program-2022
|
||||
token/program-2022-test
|
||||
associated-token-account/program
|
||||
token-upgrade/program
|
||||
feature-proposal/program
|
||||
governance/addin-mock/program
|
||||
governance/program
|
||||
memo/program
|
||||
name-service/program
|
||||
stake-pool/program
|
||||
)
|
||||
set -x
|
||||
rm -rf spl
|
||||
git clone https://github.com/solana-labs/solana-program-library.git spl
|
||||
cd spl
|
||||
|
||||
project_used_solana_version=$(sed -nE 's/solana-sdk = \"[>=<~]*(.*)\"/\1/p' <"token/program/Cargo.toml")
|
||||
echo "used solana version: $project_used_solana_version"
|
||||
if semverGT "$project_used_solana_version" "$solana_ver"; then
|
||||
echo "skip"
|
||||
return
|
||||
fi
|
||||
|
||||
./patch.crates-io.sh "$solana_dir"
|
||||
|
||||
for program in "${PROGRAMS[@]}"; do
|
||||
$cargo_test_sbf --manifest-path "$program"/Cargo.toml
|
||||
done
|
||||
|
||||
# TODO better: `build.rs` for spl-token-cli doesn't seem to properly build
|
||||
# the required programs to run the tests, so instead we run the tests
|
||||
# after we know programs have been built
|
||||
$cargo build
|
||||
$cargo test
|
||||
)
|
||||
}
|
||||
|
||||
openbook_dex() {
|
||||
(
|
||||
set -x
|
||||
rm -rf openbook-dex
|
||||
git clone https://github.com/openbook-dex/program.git openbook-dex
|
||||
cd openbook-dex
|
||||
|
||||
update_solana_dependencies . "$solana_ver"
|
||||
patch_crates_io_solana Cargo.toml "$solana_dir"
|
||||
cat >> Cargo.toml <<EOF
|
||||
anchor-lang = { git = "https://github.com/coral-xyz/anchor.git", branch = "master" }
|
||||
EOF
|
||||
patch_crates_io_solana dex/Cargo.toml "$solana_dir"
|
||||
cat >> dex/Cargo.toml <<EOF
|
||||
anchor-lang = { git = "https://github.com/coral-xyz/anchor.git", branch = "master" }
|
||||
[workspace]
|
||||
exclude = [
|
||||
"crank",
|
||||
"permissioned",
|
||||
]
|
||||
EOF
|
||||
$cargo build
|
||||
|
||||
$cargo_build_sbf \
|
||||
--manifest-path dex/Cargo.toml --no-default-features --features program
|
||||
|
||||
$cargo test \
|
||||
--manifest-path dex/Cargo.toml --no-default-features --features program
|
||||
)
|
||||
}
|
||||
|
||||
_ example_helloworld
|
||||
_ spl
|
||||
_ openbook_dex
|
|
@@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
here="$(dirname "$0")"
|
||||
cargo="$(readlink -f "${here}/../cargo")"
|
||||
|
||||
if [[ -z $cargo ]]; then
|
||||
>&2 echo "Failed to find cargo. Mac readlink doesn't support -f. Consider switching
|
||||
to gnu readlink with 'brew install coreutils' and then symlink greadlink as
|
||||
/usr/local/bin/readlink."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
set -ex
|
||||
|
||||
"$cargo" nightly fmt --all
|
||||
(cd programs/sbf && "$cargo" nightly fmt --all)
|
||||
(cd sdk/cargo-build-sbf/tests/crates/fail && "$cargo" nightly fmt --all)
|
||||
(cd sdk/cargo-build-sbf/tests/crates/noop && "$cargo" nightly fmt --all)
|
||||
(cd storage-bigtable/build-proto && "$cargo" nightly fmt --all)
|
||||
(cd web3.js/test/fixtures/noop-program && "$cargo" nightly fmt --all)
|
|
@@ -0,0 +1,62 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
here="$(dirname "$0")"
|
||||
cargo="$(readlink -f "${here}/../cargo")"
|
||||
|
||||
if [[ -z $cargo ]]; then
|
||||
>&2 echo "Failed to find cargo. Mac readlink doesn't support -f. Consider switching
|
||||
to gnu readlink with 'brew install coreutils' and then symlink greadlink as
|
||||
/usr/local/bin/readlink."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
set -e
|
||||
|
||||
shifted_args=()
|
||||
while [[ -n $1 ]]; do
|
||||
if [[ $1 = -- ]]; then
|
||||
escape_marker=found
|
||||
shift
|
||||
break
|
||||
elif [[ $1 = "--ignore-exit-code" ]]; then
|
||||
ignore=1
|
||||
shift
|
||||
else
|
||||
shifted_args+=("$1")
|
||||
shift
|
||||
fi
|
||||
done
|
||||
|
||||
# When "--" appears first and shifted_args is empty, consume it here
|
||||
# to unambiguously pass and use any other "--" for cargo
|
||||
if [[ -n $escape_marker && ${#shifted_args[@]} -gt 0 ]]; then
|
||||
files="${shifted_args[*]}"
|
||||
for file in $files; do
|
||||
if [[ $file = "${file%Cargo.lock}" ]]; then
|
||||
echo "$0: unrecognizable as Cargo.lock path (prepend \"--\"?): $file" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
shifted_args=()
|
||||
else
|
||||
files="$(git ls-files :**Cargo.lock)"
|
||||
fi
|
||||
|
||||
for lock_file in $files; do
|
||||
if [[ -n $CI ]]; then
|
||||
echo "--- [$lock_file]: cargo " "${shifted_args[@]}" "$@"
|
||||
fi
|
||||
|
||||
if (set -x && cd "$(dirname "$lock_file")" && "$cargo" "${shifted_args[@]}" "$@"); then
|
||||
# noop
|
||||
true
|
||||
else
|
||||
failed_exit_code=$?
|
||||
if [[ -n $ignore ]]; then
|
||||
echo "$0: WARN: ignoring last cargo command failed exit code as requested:" $failed_exit_code
|
||||
true
|
||||
else
|
||||
exit $failed_exit_code
|
||||
fi
|
||||
fi
|
||||
done
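
A hedged usage sketch for this helper (invoked later in this change as scripts/cargo-for-all-lock-files.sh; the lock-file paths below are illustrative): when a "--" is present, the paths before it select specific Cargo.lock files and everything after it is handed to cargo; without "--", all arguments go to cargo and every Cargo.lock in the repo is visited. --ignore-exit-code downgrades cargo failures to warnings.

  # run "cargo tree" in every workspace that carries a Cargo.lock
  scripts/cargo-for-all-lock-files.sh tree
  # run "cargo update" only for the listed lock files, tolerating failures
  scripts/cargo-for-all-lock-files.sh --ignore-exit-code Cargo.lock programs/sbf/Cargo.lock -- update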
|
|
@@ -0,0 +1,180 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# |cargo install| of the top-level crate will not install binaries for
|
||||
# other workspace crates or native program crates.
|
||||
here="$(dirname "$0")"
|
||||
readlink_cmd="readlink"
|
||||
echo "OSTYPE IS: $OSTYPE"
|
||||
if [[ $OSTYPE == darwin* ]]; then
|
||||
# Mac OS X's version of `readlink` does not support the -f option,
|
||||
# But `greadlink` does, which you can get with `brew install coreutils`
|
||||
readlink_cmd="greadlink"
|
||||
|
||||
if ! command -v ${readlink_cmd} &> /dev/null
|
||||
then
|
||||
echo "${readlink_cmd} could not be found. You may need to install coreutils: \`brew install coreutils\`"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
cargo="$("${readlink_cmd}" -f "${here}/../cargo")"
|
||||
|
||||
set -e
|
||||
|
||||
usage() {
|
||||
exitcode=0
|
||||
if [[ -n "$1" ]]; then
|
||||
exitcode=1
|
||||
echo "Error: $*"
|
||||
fi
|
||||
cat <<EOF
|
||||
usage: $0 [+<cargo version>] [--debug] [--validator-only] <install directory>
|
||||
EOF
|
||||
exit $exitcode
|
||||
}
|
||||
|
||||
maybeRustVersion=
|
||||
installDir=
|
||||
buildVariant=release
|
||||
maybeReleaseFlag=--release
|
||||
validatorOnly=
|
||||
|
||||
while [[ -n $1 ]]; do
|
||||
if [[ ${1:0:1} = - ]]; then
|
||||
if [[ $1 = --debug ]]; then
|
||||
maybeReleaseFlag=
|
||||
buildVariant=debug
|
||||
shift
|
||||
elif [[ $1 = --validator-only ]]; then
|
||||
validatorOnly=true
|
||||
shift
|
||||
else
|
||||
usage "Unknown option: $1"
|
||||
fi
|
||||
elif [[ ${1:0:1} = \+ ]]; then
|
||||
maybeRustVersion=$1
|
||||
shift
|
||||
else
|
||||
installDir=$1
|
||||
shift
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -z "$installDir" ]]; then
|
||||
usage "Install directory not specified"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
installDir="$(mkdir -p "$installDir"; cd "$installDir"; pwd)"
|
||||
mkdir -p "$installDir/bin/deps"
|
||||
|
||||
echo "Install location: $installDir ($buildVariant)"
|
||||
|
||||
cd "$(dirname "$0")"/..
|
||||
|
||||
SECONDS=0
|
||||
|
||||
if [[ $CI_OS_NAME = windows ]]; then
|
||||
# Limit windows to end-user command-line tools. Full validator support is not
|
||||
# yet available on windows
|
||||
BINS=(
|
||||
cargo-build-bpf
|
||||
cargo-build-sbf
|
||||
cargo-test-bpf
|
||||
cargo-test-sbf
|
||||
solana
|
||||
solana-install
|
||||
solana-install-init
|
||||
solana-keygen
|
||||
solana-stake-accounts
|
||||
solana-test-validator
|
||||
solana-tokens
|
||||
)
|
||||
else
|
||||
./fetch-perf-libs.sh
|
||||
|
||||
BINS=(
|
||||
solana
|
||||
solana-bench-tps
|
||||
solana-faucet
|
||||
solana-gossip
|
||||
solana-install
|
||||
solana-keygen
|
||||
solana-ledger-tool
|
||||
solana-log-analyzer
|
||||
solana-net-shaper
|
||||
solana-sys-tuner
|
||||
solana-validator
|
||||
rbpf-cli
|
||||
)
|
||||
|
||||
# Speed up net.sh deploys by excluding unused binaries
|
||||
if [[ -z "$validatorOnly" ]]; then
|
||||
BINS+=(
|
||||
cargo-build-bpf
|
||||
cargo-build-sbf
|
||||
cargo-test-bpf
|
||||
cargo-test-sbf
|
||||
solana-dos
|
||||
solana-install-init
|
||||
solana-stake-accounts
|
||||
solana-test-validator
|
||||
solana-tokens
|
||||
solana-watchtower
|
||||
)
|
||||
fi
|
||||
|
||||
#XXX: Ensure `solana-genesis` is built LAST!
|
||||
# See https://github.com/solana-labs/solana/issues/5826
|
||||
BINS+=(solana-genesis)
|
||||
fi
|
||||
|
||||
binArgs=()
|
||||
for bin in "${BINS[@]}"; do
|
||||
binArgs+=(--bin "$bin")
|
||||
done
|
||||
|
||||
mkdir -p "$installDir/bin"
|
||||
|
||||
(
|
||||
set -x
|
||||
# shellcheck disable=SC2086 # Don't want to double quote $rust_version
|
||||
"$cargo" $maybeRustVersion build $maybeReleaseFlag "${binArgs[@]}"
|
||||
|
||||
# Exclude `spl-token` binary for net.sh builds
|
||||
if [[ -z "$validatorOnly" ]]; then
|
||||
# shellcheck disable=SC2086 # Don't want to double quote $rust_version
|
||||
"$cargo" $maybeRustVersion install --locked spl-token-cli --root "$installDir"
|
||||
fi
|
||||
)
|
||||
|
||||
for bin in "${BINS[@]}"; do
|
||||
cp -fv "target/$buildVariant/$bin" "$installDir"/bin
|
||||
done
|
||||
|
||||
if [[ -d target/perf-libs ]]; then
|
||||
cp -a target/perf-libs "$installDir"/bin/perf-libs
|
||||
fi
|
||||
|
||||
if [[ -z "$validatorOnly" ]]; then
|
||||
# shellcheck disable=SC2086 # Don't want to double quote $rust_version
|
||||
"$cargo" $maybeRustVersion build --manifest-path programs/bpf_loader/gen-syscall-list/Cargo.toml
|
||||
# shellcheck disable=SC2086 # Don't want to double quote $rust_version
|
||||
"$cargo" $maybeRustVersion run --bin gen-headers
|
||||
mkdir -p "$installDir"/bin/sdk/sbf
|
||||
cp -a sdk/sbf/* "$installDir"/bin/sdk/sbf
|
||||
fi
|
||||
|
||||
(
|
||||
set -x
|
||||
# deps dir can be empty
|
||||
shopt -s nullglob
|
||||
for dep in target/"$buildVariant"/deps/libsolana*program.*; do
|
||||
cp -fv "$dep" "$installDir/bin/deps"
|
||||
done
|
||||
)
|
||||
|
||||
echo "Done after $SECONDS seconds"
|
||||
echo
|
||||
echo "To use these binaries:"
|
||||
echo " export PATH=\"$installDir\"/bin:\"\$PATH\""
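
A hedged invocation example matching the usage line above (the script path and install directories are assumptions, since file names are not shown in this diff):

  # release build of all binaries into ~/solana-release/bin
  scripts/cargo-install-all.sh ~/solana-release
  # debug build of only the validator binaries, using a pinned toolchain
  scripts/cargo-install-all.sh +1.60.0 --debug --validator-only ~/solana-debug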
|
|
@@ -0,0 +1,52 @@
|
|||
# |source| this file
|
||||
#
|
||||
# The SOLANA_METRICS_CONFIG environment variable is formatted as a
|
||||
# comma-delimited list of parameters. All parameters are optional.
|
||||
#
|
||||
# Example:
|
||||
# export SOLANA_METRICS_CONFIG="host=<metrics host>,db=<database name>,u=<username>,p=<password>"
|
||||
#
|
||||
# The following directive disables complaints about unused variables in this
|
||||
# file:
|
||||
# shellcheck disable=2034
|
||||
#
|
||||
|
||||
configureMetrics() {
|
||||
[[ -n $SOLANA_METRICS_CONFIG ]] || return 0
|
||||
|
||||
declare metricsParams
|
||||
IFS=',' read -r -a metricsParams <<< "$SOLANA_METRICS_CONFIG"
|
||||
for param in "${metricsParams[@]}"; do
|
||||
IFS='=' read -r -a pair <<< "$param"
|
||||
if [[ ${#pair[@]} != 2 ]]; then
|
||||
echo Error: invalid metrics parameter: "$param" >&2
|
||||
else
|
||||
declare name="${pair[0]}"
|
||||
declare value="${pair[1]}"
|
||||
case "$name" in
|
||||
host)
|
||||
export INFLUX_HOST="$value"
|
||||
echo INFLUX_HOST="$INFLUX_HOST" >&2
|
||||
;;
|
||||
db)
|
||||
export INFLUX_DATABASE="$value"
|
||||
echo INFLUX_DATABASE="$INFLUX_DATABASE" >&2
|
||||
;;
|
||||
u)
|
||||
export INFLUX_USERNAME="$value"
|
||||
echo INFLUX_USERNAME="$INFLUX_USERNAME" >&2
|
||||
;;
|
||||
p)
|
||||
export INFLUX_PASSWORD="$value"
|
||||
echo INFLUX_PASSWORD="********" >&2
|
||||
;;
|
||||
*)
|
||||
echo Error: Unknown metrics parameter name: "$name" >&2
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
done
|
||||
}
|
||||
configureMetrics
|
||||
|
||||
metricsWriteDatapoint="$(dirname "${BASH_SOURCE[0]}")"/metrics-write-datapoint.sh
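
A hedged end-to-end example of using this file (host, database, and credentials are placeholders):

  export SOLANA_METRICS_CONFIG="host=https://metrics.example.com:8086,db=testnet,u=writer,p=topsecret"
  source scripts/configure-metrics.sh   # exports INFLUX_HOST, INFLUX_DATABASE, INFLUX_USERNAME, INFLUX_PASSWORD
  ./scripts/metrics-write-datapoint.sh "test-event,hostname=$HOSTNAME happened=1"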
|
|
@@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
usage() {
|
||||
cat <<EOF
|
||||
usage: $0 branch tag
|
||||
|
||||
Checks that the tag matches the branch (unless branch is master) and the Cargo.toml versions match the tag.
|
||||
EOF
|
||||
exit 0
|
||||
}
|
||||
|
||||
branch="$1"
|
||||
tag="$2"
|
||||
|
||||
[[ -n $tag ]] || usage
|
||||
echo "branch: $branch tag: $tag"
|
||||
|
||||
# The tag is expected to be the branch name plus a patch number (unless branch is master). eg:
|
||||
# tag: v1.2.3
|
||||
# branch: v1.2
|
||||
if [[ "$tag" != "$branch"* && $branch != "master" ]]; then
|
||||
>&2 echo "Tag must start with the branch name (unless branch is master). Tag: $tag Branch: $branch"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
here="$(dirname "$0")"
|
||||
cd "$here"/..
|
||||
source scripts/read-cargo-variable.sh
|
||||
|
||||
ignores=(
|
||||
.cache
|
||||
.cargo
|
||||
target
|
||||
web3.js/test
|
||||
node_modules
|
||||
)
|
||||
|
||||
not_paths=()
|
||||
for ignore in "${ignores[@]}"; do
|
||||
not_paths+=(-not -path "*/$ignore/*")
|
||||
done
|
||||
|
||||
# shellcheck disable=2207
|
||||
Cargo_tomls=($(find . -mindepth 2 -name Cargo.toml "${not_paths[@]}"))
|
||||
|
||||
for Cargo_toml in "${Cargo_tomls[@]}"; do
|
||||
manifest_version="$(readCargoVariable version "${Cargo_toml}")"
|
||||
if ! [[ "v$manifest_version" == "$tag" ]]; then
|
||||
>&2 echo "Tag must match the crate version in the manifest files. Mismatch found in $Cargo_toml. Tag: $tag Manifest version: $manifest_version"
|
||||
exit 1
|
||||
else
|
||||
echo "tag matches manifest: $Cargo_toml $manifest_version $tag"
|
||||
fi
|
||||
done
|
|
@@ -0,0 +1,134 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs all tests and collects code coverage
|
||||
#
|
||||
# Warning: this process is a little slow
|
||||
#
|
||||
|
||||
if ! command -v grcov; then
|
||||
echo "Error: grcov not found. Try |cargo install grcov|"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "$(grcov --version)" =~ 0.8.[0-9] ]]; then
|
||||
echo Error: Required grcov version not installed
|
||||
|
||||
echo "Installed version: $(grcov --version)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")/.."
|
||||
source ci/_
|
||||
|
||||
cargo="$(readlink -f "./cargo")"
|
||||
|
||||
: "${CI_COMMIT:=local}"
|
||||
reportName="lcov-${CI_COMMIT:0:9}"
|
||||
|
||||
if [[ -z $1 ]]; then
|
||||
packages=(--lib --all --exclude solana-local-cluster)
|
||||
else
|
||||
packages=("$@")
|
||||
fi
|
||||
|
||||
coverageFlags=()
|
||||
coverageFlags+=(-Zprofile) # Enable coverage
|
||||
coverageFlags+=("-Aincomplete_features") # Suppress warnings due to frozen abi, which are harmless here
|
||||
if [[ $(uname) != Darwin ]]; then # macOS skipped due to https://github.com/rust-lang/rust/issues/63047
|
||||
coverageFlags+=("-Clink-dead-code") # Dead code should appear red in the report
|
||||
fi
|
||||
coverageFlags+=("-Ccodegen-units=1") # Disable code generation parallelism which is unsupported under -Zprofile (see [rustc issue #51705]).
|
||||
coverageFlags+=("-Cinline-threshold=0") # Disable inlining, which complicates control flow.
|
||||
coverageFlags+=("-Copt-level=0")
|
||||
coverageFlags+=("-Coverflow-checks=off") # Disable overflow checks, which create unnecessary branches.
|
||||
|
||||
export RUSTFLAGS="${coverageFlags[*]} $RUSTFLAGS"
|
||||
export CARGO_INCREMENTAL=0
|
||||
export RUST_BACKTRACE=1
|
||||
export RUST_MIN_STACK=8388608
|
||||
export SOLANA_TEST_ACCOUNTS_INDEX_MEMORY_LIMIT_MB=10000
|
||||
|
||||
echo "--- remove old coverage results"
|
||||
if [[ -d target/cov ]]; then
|
||||
find target/cov -type f -name '*.gcda' -delete
|
||||
fi
|
||||
rm -rf target/cov/$reportName
|
||||
mkdir -p target/cov
|
||||
|
||||
# Mark the base time for a clean room dir
|
||||
touch target/cov/before-test
|
||||
|
||||
# Force rebuild of possibly-cached proc macro crates and build.rs because
|
||||
# we always want stable coverage for them
|
||||
# Don't support odd file names in our repo ever
|
||||
if [[ -n $CI || -z $1 ]]; then
|
||||
# shellcheck disable=SC2046
|
||||
touch \
|
||||
$(git ls-files :**/build.rs) \
|
||||
$(git grep -l "proc-macro.*true" :**/Cargo.toml | sed 's|Cargo.toml|src/lib.rs|')
|
||||
fi
|
||||
|
||||
# limit jobs to 4gb/thread
|
||||
if [[ -f "/proc/meminfo" ]]; then
|
||||
JOBS=$(grep MemTotal /proc/meminfo | awk '{printf "%.0f", ($2 / (4 * 1024 * 1024))}')
|
||||
else
|
||||
JOBS=$(sysctl hw.memsize | awk '{printf "%.0f", ($2 / (4 * 1024**3))}')
|
||||
fi
|
||||
|
||||
NPROC=$(nproc)
|
||||
JOBS=$((JOBS>NPROC ? NPROC : JOBS))
|
||||
|
||||
RUST_LOG=solana=trace _ "$cargo" nightly test --jobs "$JOBS" --target-dir target/cov --no-run "${packages[@]}"
|
||||
if RUST_LOG=solana=trace _ "$cargo" nightly test --jobs "$JOBS" --target-dir target/cov "${packages[@]}" 2> target/cov/coverage-stderr.log; then
|
||||
test_status=0
|
||||
else
|
||||
test_status=$?
|
||||
echo "Failed: $test_status"
|
||||
echo "^^^ +++"
|
||||
if [[ -n $CI ]]; then
|
||||
exit $test_status
|
||||
fi
|
||||
fi
|
||||
touch target/cov/after-test
|
||||
|
||||
echo "--- grcov"
|
||||
|
||||
# Create a clean room dir only with updated gcda/gcno files for this run,
|
||||
# because our cached target dir is full of other builds' coverage files
|
||||
rm -rf target/cov/tmp
|
||||
mkdir -p target/cov/tmp
|
||||
|
||||
# Can't use a simpler construct under the condition of SC2044 and bash 3
|
||||
# (macOS's default). See: https://github.com/koalaman/shellcheck/wiki/SC2044
|
||||
find target/cov -type f -name '*.gcda' -newer target/cov/before-test ! -newer target/cov/after-test -print0 |
|
||||
(while IFS= read -r -d '' gcda_file; do
|
||||
gcno_file="${gcda_file%.gcda}.gcno"
|
||||
ln -sf "../../../$gcda_file" "target/cov/tmp/$(basename "$gcda_file")"
|
||||
ln -sf "../../../$gcno_file" "target/cov/tmp/$(basename "$gcno_file")"
|
||||
done)
|
||||
|
||||
(
|
||||
grcov_args=(
|
||||
target/cov/tmp
|
||||
--llvm
|
||||
--ignore \*.cargo\*
|
||||
--ignore \*build.rs
|
||||
--ignore bench-tps\*
|
||||
--ignore upload-perf\*
|
||||
--ignore bench-streamer\*
|
||||
--ignore local-cluster\*
|
||||
)
|
||||
|
||||
set -x
|
||||
grcov "${grcov_args[@]}" -t html -o target/cov/$reportName
|
||||
grcov "${grcov_args[@]}" -t lcov -o target/cov/lcov.info
|
||||
|
||||
cd target/cov
|
||||
tar zcf report.tar.gz $reportName
|
||||
)
|
||||
|
||||
ls -l target/cov/$reportName/index.html
|
||||
ln -sfT $reportName target/cov/LATEST
|
||||
|
||||
exit $test_status
|
|
@@ -0,0 +1,133 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs all tests and collects code coverage
|
||||
#
|
||||
# Warning: this process is a little slow
|
||||
#
|
||||
|
||||
if ! command -v grcov; then
|
||||
echo "Error: grcov not found. Try |cargo install grcov|"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "$(grcov --version)" =~ 0.8.[0-9] ]]; then
|
||||
echo Error: Required grcov version not installed
|
||||
|
||||
echo "Installed version: $(grcov --version)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")/.."
|
||||
source ci/_
|
||||
|
||||
cargo="$(readlink -f "./cargo")"
|
||||
|
||||
: "${CI_COMMIT:=local}"
|
||||
reportName="lcov-${CI_COMMIT:0:9}"
|
||||
|
||||
if [[ -z $1 ]]; then
|
||||
packages=(--lib --all --exclude solana-local-cluster)
|
||||
else
|
||||
packages=("$@")
|
||||
fi
|
||||
|
||||
coverageFlags=()
|
||||
coverageFlags+=(-Zprofile) # Enable coverage
|
||||
coverageFlags+=("-Aincomplete_features") # Suppress warnings due to frozen abi, which are harmless here
|
||||
if [[ $(uname) != Darwin ]]; then # macOS skipped due to https://github.com/rust-lang/rust/issues/63047
|
||||
coverageFlags+=("-Clink-dead-code") # Dead code should appear red in the report
|
||||
fi
|
||||
coverageFlags+=("-Ccodegen-units=1") # Disable code generation parallelism which is unsupported under -Zprofile (see [rustc issue #51705]).
|
||||
coverageFlags+=("-Cinline-threshold=0") # Disable inlining, which complicates control flow.
|
||||
coverageFlags+=("-Copt-level=0")
|
||||
coverageFlags+=("-Coverflow-checks=off") # Disable overflow checks, which create unnecessary branches.
|
||||
|
||||
export RUSTFLAGS="${coverageFlags[*]} $RUSTFLAGS"
|
||||
export CARGO_INCREMENTAL=0
|
||||
export RUST_BACKTRACE=1
|
||||
export RUST_MIN_STACK=8388608
|
||||
|
||||
echo "--- remove old coverage results"
|
||||
if [[ -d target/cov ]]; then
|
||||
find target/cov -type f -name '*.gcda' -delete
|
||||
fi
|
||||
rm -rf target/cov/$reportName
|
||||
mkdir -p target/cov
|
||||
|
||||
# Mark the base time for a clean room dir
|
||||
touch target/cov/before-test
|
||||
|
||||
# Force rebuild of possibly-cached proc macro crates and build.rs because
|
||||
# we always want stable coverage for them
|
||||
# Don't support odd file names in our repo ever
|
||||
if [[ -n $CI || -z $1 ]]; then
|
||||
# shellcheck disable=SC2046
|
||||
touch \
|
||||
$(git ls-files :**/build.rs) \
|
||||
$(git grep -l "proc-macro.*true" :**/Cargo.toml | sed 's|Cargo.toml|src/lib.rs|')
|
||||
fi
|
||||
|
||||
# limit jobs to 4gb/thread
|
||||
if [[ -f "/proc/meminfo" ]]; then
|
||||
JOBS=$(grep MemTotal /proc/meminfo | awk '{printf "%.0f", ($2 / (4 * 1024 * 1024))}')
|
||||
else
|
||||
JOBS=$(sysctl hw.memsize | awk '{printf "%.0f", ($2 / (4 * 1024**3))}')
|
||||
fi
|
||||
|
||||
NPROC=$(nproc)
|
||||
JOBS=$((JOBS>NPROC ? NPROC : JOBS))
|
||||
|
||||
RUST_LOG=solana=trace _ "$cargo" nightly test --jobs "$JOBS" --target-dir target/cov --no-run "${packages[@]}"
|
||||
if RUST_LOG=solana=trace _ "$cargo" nightly test --jobs "$JOBS" --target-dir target/cov "${packages[@]}" -- --nocapture 2> >(tee target/cov/coverage-stderr.log >&2); then
|
||||
test_status=0
|
||||
else
|
||||
test_status=$?
|
||||
echo "Failed: $test_status"
|
||||
echo "^^^ +++"
|
||||
if [[ -n $CI ]]; then
|
||||
exit $test_status
|
||||
fi
|
||||
fi
|
||||
touch target/cov/after-test
|
||||
|
||||
echo "--- grcov"
|
||||
|
||||
# Create a clean room dir only with updated gcda/gcno files for this run,
|
||||
# because our cached target dir is full of other builds' coverage files
|
||||
rm -rf target/cov/tmp
|
||||
mkdir -p target/cov/tmp
|
||||
|
||||
# Can't use a simpler construct under the condition of SC2044 and bash 3
|
||||
# (macOS's default). See: https://github.com/koalaman/shellcheck/wiki/SC2044
|
||||
find target/cov -type f -name '*.gcda' -newer target/cov/before-test ! -newer target/cov/after-test -print0 |
|
||||
(while IFS= read -r -d '' gcda_file; do
|
||||
gcno_file="${gcda_file%.gcda}.gcno"
|
||||
ln -sf "../../../$gcda_file" "target/cov/tmp/$(basename "$gcda_file")"
|
||||
ln -sf "../../../$gcno_file" "target/cov/tmp/$(basename "$gcno_file")"
|
||||
done)
|
||||
|
||||
(
|
||||
grcov_args=(
|
||||
target/cov/tmp
|
||||
--llvm
|
||||
--ignore \*.cargo\*
|
||||
--ignore \*build.rs
|
||||
--ignore bench-tps\*
|
||||
--ignore upload-perf\*
|
||||
--ignore bench-streamer\*
|
||||
--ignore local-cluster\*
|
||||
)
|
||||
|
||||
set -x
|
||||
grcov "${grcov_args[@]}" -t html -o target/cov/$reportName
|
||||
grcov "${grcov_args[@]}" -t lcov -o target/cov/lcov.info
|
||||
|
||||
cd target/cov
|
||||
tar zcf report.tar.gz $reportName
|
||||
)
|
||||
|
||||
ls -l target/cov/$reportName/index.html
|
||||
ln -sfT $reportName target/cov/LATEST
|
||||
|
||||
exit $test_status
|
|
@@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Reports open file descriptors for the current user
|
||||
#
|
||||
set -e
|
||||
|
||||
[[ $(uname) == Linux ]] || exit 0
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
# shellcheck source=scripts/configure-metrics.sh
|
||||
source configure-metrics.sh
|
||||
|
||||
while true; do
|
||||
count=$(lsof -u $UID | wc -l)
|
||||
./metrics-write-datapoint.sh "open-files,hostname=$HOSTNAME count=$count"
|
||||
sleep 10
|
||||
done
|
||||
|
||||
exit 1
|
|
@@ -0,0 +1,33 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Reports network bandwidth usage
|
||||
#
|
||||
set -e
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <iftop log file> <temp file for intermediate data> [optional list of IP address mapping]"
|
||||
echo
|
||||
echo Processes iftop log file, and extracts latest bandwidth used by each connection
|
||||
echo
|
||||
echo
|
||||
}
|
||||
|
||||
if [ "$#" -lt 2 ]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
awk '{ if ($3 ~ "=>") { print $2, $7 } else if ($2 ~ "<=") { print $1, $6 }} ' < "$1" \
|
||||
| awk 'NR%2{printf "%s ",$0;next;}1' \
|
||||
| awk '{ print "{ \"a\": \""$1"\", " "\"b\": \""$3"\", \"a_to_b\": \""$2"\", \"b_to_a\": \""$4"\"}," }' > "$2"
|
||||
|
||||
if [ "$#" -lt 3 ]; then
|
||||
solana-log-analyzer iftop -f "$2"
|
||||
else
|
||||
list=$(cat "$3")
|
||||
solana-log-analyzer iftop -f "$2" map-IP --list "$list"
|
||||
fi
|
||||
|
||||
exit 1
|
|
@@ -0,0 +1,16 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Reports network bandwidth usage
|
||||
#
|
||||
set -e
|
||||
|
||||
[[ $(uname) == Linux ]] || exit 0
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
sudo=
|
||||
if sudo true; then
|
||||
sudo="sudo -n"
|
||||
fi
|
||||
# shellcheck disable=SC2086
|
||||
exec $sudo iftop -i "$(ifconfig | grep mtu | grep -iv loopback | grep -i running | awk 'BEGIN { FS = ":" } ; {print $1}')" -nNbBP -t -L 1000
|
|
@@ -0,0 +1,144 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
usage() {
|
||||
cat <<EOF
|
||||
usage: $0 [major|minor|patch|-preXYZ]
|
||||
|
||||
Increments the Cargo.toml version.
|
||||
|
||||
Default:
|
||||
* Removes the prerelease tag if present, otherwise the minor version is incremented.
|
||||
EOF
|
||||
exit 0
|
||||
}
|
||||
|
||||
here="$(dirname "$0")"
|
||||
cd "$here"/..
|
||||
source ci/semver_bash/semver.sh
|
||||
source scripts/read-cargo-variable.sh
|
||||
|
||||
ignores=(
|
||||
.cache
|
||||
.cargo
|
||||
target
|
||||
web3.js/test
|
||||
node_modules
|
||||
)
|
||||
|
||||
not_paths=()
|
||||
for ignore in "${ignores[@]}"; do
|
||||
not_paths+=(-not -path "*/$ignore/*")
|
||||
done
|
||||
|
||||
# shellcheck disable=2207
|
||||
Cargo_tomls=($(find . -mindepth 2 -name Cargo.toml "${not_paths[@]}"))
|
||||
|
||||
# Collect the name of all the internal crates
|
||||
crates=()
|
||||
for Cargo_toml in "${Cargo_tomls[@]}"; do
|
||||
crates+=("$(readCargoVariable name "$Cargo_toml")")
|
||||
done
|
||||
|
||||
# Read the current version
|
||||
MAJOR=0
|
||||
MINOR=0
|
||||
PATCH=0
|
||||
SPECIAL=""
|
||||
|
||||
semverParseInto "$(readCargoVariable version "${Cargo_tomls[0]}")" MAJOR MINOR PATCH SPECIAL
|
||||
[[ -n $MAJOR ]] || usage
|
||||
|
||||
currentVersion="$MAJOR\.$MINOR\.$PATCH$SPECIAL"
|
||||
|
||||
bump=$1
|
||||
if [[ -z $bump ]]; then
|
||||
if [[ -n $SPECIAL ]]; then
|
||||
bump=dropspecial # Remove prerelease tag
|
||||
else
|
||||
bump=minor
|
||||
fi
|
||||
fi
|
||||
SPECIAL=""
|
||||
|
||||
# Figure out what to increment
|
||||
case $bump in
|
||||
patch)
|
||||
PATCH=$((PATCH + 1))
|
||||
;;
|
||||
major)
|
||||
MAJOR=$((MAJOR+ 1))
|
||||
MINOR=0
|
||||
PATCH=0
|
||||
;;
|
||||
minor)
|
||||
MINOR=$((MINOR+ 1))
|
||||
PATCH=0
|
||||
;;
|
||||
dropspecial)
|
||||
;;
|
||||
check)
|
||||
badTomls=()
|
||||
for Cargo_toml in "${Cargo_tomls[@]}"; do
|
||||
if ! grep "^version *= *\"$currentVersion\"$" "$Cargo_toml" &>/dev/null; then
|
||||
badTomls+=("$Cargo_toml")
|
||||
fi
|
||||
done
|
||||
if [[ ${#badTomls[@]} -ne 0 ]]; then
|
||||
echo "Error: Incorrect crate version specified in: ${badTomls[*]}"
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
;;
|
||||
-*)
|
||||
if [[ $1 =~ ^-[A-Za-z0-9]*$ ]]; then
|
||||
SPECIAL="$1"
|
||||
else
|
||||
echo "Error: Unsupported characters found in $1"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo "Error: unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
|
||||
# Version bumps should occur in their own commit. Disallow bumping version
|
||||
# in dirty working trees. Gate after arg parsing to prevent breaking the
|
||||
# `check` subcommand.
|
||||
(
|
||||
set +e
|
||||
if ! git diff --exit-code; then
|
||||
echo -e "\nError: Working tree is dirty. Commit or discard changes before bumping version." 1>&2
|
||||
exit 1
|
||||
fi
|
||||
)
|
||||
|
||||
newVersion="$MAJOR.$MINOR.$PATCH$SPECIAL"
|
||||
|
||||
# Update all the Cargo.toml files
|
||||
for Cargo_toml in "${Cargo_tomls[@]}"; do
|
||||
# Set new crate version
|
||||
(
|
||||
set -x
|
||||
sed -i "$Cargo_toml" -e "0,/^version =/{s/^version = \"[^\"]*\"$/version = \"$newVersion\"/}"
|
||||
)
|
||||
|
||||
# Fix up the version references to other internal crates
|
||||
for crate in "${crates[@]}"; do
|
||||
(
|
||||
set -x
|
||||
sed -i "$Cargo_toml" -e "
|
||||
s/^$crate = { *path *= *\"\([^\"]*\)\" *, *version *= *\"[^\"]*\"\(.*\)} *\$/$crate = \{ path = \"\1\", version = \"=$newVersion\"\2\}/
|
||||
"
|
||||
)
|
||||
done
|
||||
done
|
||||
|
||||
# Update cargo lock files
|
||||
scripts/cargo-for-all-lock-files.sh tree >/dev/null
|
||||
|
||||
echo "$currentVersion -> $newVersion"
|
||||
|
||||
exit 0
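
Hedged usage sketch (the script's file name is not shown in this diff; scripts/increment-cargo-version.sh is assumed, and version numbers are illustrative):

  # verify that every Cargo.toml already carries the expected version
  scripts/increment-cargo-version.sh check
  # bump the patch number, e.g. 1.14.3 -> 1.14.4, across all manifests and lock files
  scripts/increment-cargo-version.sh patch
  # attach a prerelease tag instead, e.g. 1.15.0 -> 1.15.0-beta1
  scripts/increment-cargo-version.sh -beta1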
|
|
@@ -0,0 +1,26 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Send a metrics datapoint
|
||||
#
|
||||
|
||||
point=$1
|
||||
if [[ -z $point ]]; then
|
||||
echo "Data point not specified"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "[$(date -u +"%Y-%m-%dT%H:%M:%SZ")] Influx data point: $point"
|
||||
if [[ -z $INFLUX_DATABASE || -z $INFLUX_USERNAME || -z $INFLUX_PASSWORD ]]; then
|
||||
echo Influx user credentials not found
|
||||
exit 0
|
||||
fi
|
||||
|
||||
host="https://internal-metrics.solana.com:8086"
|
||||
|
||||
if [[ -n $INFLUX_HOST ]]; then
|
||||
host="$INFLUX_HOST"
|
||||
fi
|
||||
|
||||
echo "${host}/write?db=${INFLUX_DATABASE}&u=${INFLUX_USERNAME}&p=${INFLUX_PASSWORD}" \
|
||||
| xargs curl --max-time 5 --silent --show-error -XPOST --data-binary "$point"
|
||||
exit 0
|
|
@@ -0,0 +1,24 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Start/Stop network shaper
|
||||
#
|
||||
set -e
|
||||
|
||||
[[ $(uname) == Linux ]] || exit 0
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
sudo=
|
||||
if sudo true; then
|
||||
sudo="sudo -n"
|
||||
fi
|
||||
|
||||
set -x
|
||||
|
||||
iface="$(ip link show | grep mtu | grep -iv loopback | grep "state UP" | awk 'BEGIN { FS = ": " } ; {print $2}')"
|
||||
|
||||
if [[ "$1" = cleanup ]]; then
|
||||
$sudo ~solana/.cargo/bin/solana-net-shaper cleanup -f "$2" -s "$3" -p "$4" -i "$iface"
|
||||
else
|
||||
$sudo ~solana/.cargo/bin/solana-net-shaper shape -f "$2" -s "$3" -p "$4" -i "$iface"
|
||||
fi
|
|
@@ -0,0 +1,80 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Reports network statistics
|
||||
#
|
||||
set -e
|
||||
|
||||
[[ $(uname) == Linux ]] || exit 0
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
# shellcheck source=scripts/configure-metrics.sh
|
||||
source configure-metrics.sh
|
||||
|
||||
packets_sent=0
|
||||
packets_sent_diff=0
|
||||
packets_received=0
|
||||
packets_received_diff=0
|
||||
receive_errors=0
|
||||
receive_errors_diff=0
|
||||
receive_buffer_errors=0
|
||||
receive_buffer_errors_diff=0
|
||||
send_buffer_errors=0
|
||||
send_buffer_errors_diff=0
|
||||
rcvbuf_errors=0
|
||||
rcvbuf_errors_diff=0
|
||||
in_octets=0
|
||||
in_octets_diff=0
|
||||
out_octets=0
|
||||
out_octets_diff=0
|
||||
|
||||
update_netstat() {
|
||||
declare net_stat
|
||||
net_stat=$(netstat -suna)
|
||||
|
||||
declare stats
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /packets sent/ {tmp_var = $1} END { print tmp_var }')
|
||||
packets_sent_diff=$((stats - packets_sent))
|
||||
packets_sent="$stats"
|
||||
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /packets received/ {tmp_var = $1} END { print tmp_var }')
|
||||
packets_received_diff=$((stats - packets_received))
|
||||
packets_received="$stats"
|
||||
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /packet receive errors/ {tmp_var = $1} END { print tmp_var }')
|
||||
receive_errors_diff=$((stats - receive_errors))
|
||||
receive_errors="$stats"
|
||||
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /receive buffer errors/ {tmp_var = $1} END { print tmp_var }')
|
||||
receive_buffer_errors_diff=$((stats - receive_buffer_errors))
|
||||
receive_buffer_errors="$stats"
|
||||
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /send buffer errors/ {tmp_var = $1} END { print tmp_var }')
|
||||
send_buffer_errors_diff=$((stats - send_buffer_errors))
|
||||
send_buffer_errors="$stats"
|
||||
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /RcvbufErrors/ {tmp_var = $2} END { print tmp_var }')
|
||||
rcvbuf_errors_diff=$((stats - rcvbuf_errors))
|
||||
rcvbuf_errors="$stats"
|
||||
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /InOctets/ {tmp_var = $2} END { print tmp_var }')
|
||||
in_octets_diff=$((stats - in_octets))
|
||||
in_octets="$stats"
|
||||
|
||||
stats=$(echo "$net_stat" | awk 'BEGIN {tmp_var = 0} /OutOctets/ {tmp_var = $2} END { print tmp_var }')
|
||||
out_octets_diff=$((stats - out_octets))
|
||||
out_octets="$stats"
|
||||
}
|
||||
|
||||
update_netstat
|
||||
|
||||
while true; do
|
||||
update_netstat
|
||||
report="packets_sent=$packets_sent_diff,packets_received=$packets_received_diff,receive_errors=$receive_errors_diff,receive_buffer_errors=$receive_buffer_errors_diff,send_buffer_errors=$send_buffer_errors_diff,rcvbuf_errors=$rcvbuf_errors_diff,in_octets=$in_octets_diff,out_octets=$out_octets_diff"
|
||||
|
||||
echo "$report"
|
||||
./metrics-write-datapoint.sh "net-stats,hostname=$HOSTNAME $report"
|
||||
sleep 1
|
||||
done
|
||||
|
||||
exit 1
|
|
@@ -0,0 +1,29 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Start/Stop network emulation
|
||||
#
|
||||
set -e
|
||||
|
||||
[[ $(uname) == Linux ]] || exit 0
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
sudo=
|
||||
if sudo true; then
|
||||
sudo="sudo -n"
|
||||
fi
|
||||
|
||||
set -x
|
||||
|
||||
iface="$(ifconfig | grep mtu | grep -iv loopback | grep -i running | awk 'BEGIN { FS = ":" } ; {print $1}')"
|
||||
|
||||
if [[ "$1" = delete ]]; then
|
||||
$sudo iptables -F -t mangle
|
||||
else
|
||||
$sudo iptables -A POSTROUTING -t mangle -p udp -j MARK --set-mark 1
|
||||
fi
|
||||
|
||||
$sudo tc qdisc "$1" dev "$iface" root handle 1: prio
|
||||
# shellcheck disable=SC2086 # Do not want to quote $2. It has space separated arguments for netem
|
||||
$sudo tc qdisc "$1" dev "$iface" parent 1:3 handle 30: netem $2
|
||||
$sudo tc filter "$1" dev "$iface" parent 1:0 protocol ip prio 3 handle 1 fw flowid 1:3
|
|
@@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Reports Linux OOM Killer activity
|
||||
#
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
# shellcheck source=scripts/oom-score-adj.sh
|
||||
source oom-score-adj.sh
|
||||
|
||||
# shellcheck source=scripts/configure-metrics.sh
|
||||
source configure-metrics.sh
|
||||
|
||||
[[ $(uname) = Linux ]] || exit 0
|
||||
|
||||
syslog=/var/log/syslog
|
||||
[[ -r $syslog ]] || {
|
||||
echo Unable to read $syslog
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Adjust OOM score to reduce the chance that this script will be killed
|
||||
# during an Out of Memory event since the purpose of this script is to
|
||||
# report such events
|
||||
oom_score_adj "self" -500
|
||||
|
||||
while read -r victim; do
|
||||
echo "Out of memory event detected, $victim killed"
|
||||
./metrics-write-datapoint.sh "oom-killer,victim=$victim,hostname=$HOSTNAME killed=1"
|
||||
done < <( \
|
||||
tail --follow=name --retry -n0 $syslog \
|
||||
| sed --unbuffered -n "s/^.* earlyoom\[[0-9]*\]: Killing process .\(.*\). with signal .*/\1/p" \
|
||||
)
|
||||
|
||||
exit 1
|
|
@@ -0,0 +1,20 @@
|
|||
# |source| this file
|
||||
#
|
||||
# Adjusts the OOM score for the specified process. Linux only
|
||||
#
|
||||
# usage: oom_score_adj [pid] [score]
|
||||
#
|
||||
oom_score_adj() {
|
||||
declare pid=$1
|
||||
declare score=$2
|
||||
if [[ $(uname) != Linux ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
echo "$score" > "/proc/$pid/oom_score_adj" || true
|
||||
declare currentScore
|
||||
currentScore=$(cat "/proc/$pid/oom_score_adj" || true)
|
||||
if [[ $score != "$currentScore" ]]; then
|
||||
echo "Failed to set oom_score_adj to $score for pid $pid (current score: $currentScore)"
|
||||
fi
|
||||
}
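
The OOM monitor above uses this helper on itself; a minimal usage sketch:

  source scripts/oom-score-adj.sh
  # negative scores make the process less likely to be chosen by the OOM killer
  oom_score_adj "self" -500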
|
|
@@ -0,0 +1,39 @@
|
|||
# source this file
|
||||
|
||||
update_solana_dependencies() {
|
||||
declare project_root="$1"
|
||||
declare solana_ver="$2"
|
||||
declare tomls=()
|
||||
while IFS='' read -r line; do tomls+=("$line"); done < <(find "$project_root" -name Cargo.toml)
|
||||
|
||||
sed -i -e "s#\(solana-program = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-program-test = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-sdk = \"\).*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-sdk = { version = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-client = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-client = { version = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-clap-utils = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-clap-utils = { version = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-account-decoder = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-account-decoder = { version = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-faucet = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-faucet = { version = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-zk-token-sdk = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
sed -i -e "s#\(solana-zk-token-sdk = { version = \"\)[^\"]*\(\"\)#\1=$solana_ver\2#g" "${tomls[@]}" || return $?
|
||||
}
|
||||
|
||||
patch_crates_io_solana() {
|
||||
declare Cargo_toml="$1"
|
||||
declare solana_dir="$2"
|
||||
cat >> "$Cargo_toml" <<EOF
|
||||
[patch.crates-io]
|
||||
solana-account-decoder = { path = "$solana_dir/account-decoder" }
|
||||
solana-clap-utils = { path = "$solana_dir/clap-utils" }
|
||||
solana-client = { path = "$solana_dir/client" }
|
||||
solana-program = { path = "$solana_dir/sdk/program" }
|
||||
solana-program-test = { path = "$solana_dir/program-test" }
|
||||
solana-sdk = { path = "$solana_dir/sdk" }
|
||||
solana-faucet = { path = "$solana_dir/faucet" }
|
||||
solana-zk-token-sdk = { path = "$solana_dir/zk-token-sdk" }
|
||||
EOF
|
||||
}
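
The downstream-project builds earlier in this change drive these helpers roughly as follows (the downstream path is illustrative):

  source scripts/patch-crates.sh
  source scripts/read-cargo-variable.sh
  solana_ver=$(readCargoVariable version sdk/Cargo.toml)
  # pin the project's solana-* dependencies to the local version, then point crates.io at this checkout
  update_solana_dependencies path/to/downstream-project "$solana_ver"
  patch_crates_io_solana path/to/downstream-project/Cargo.toml "$PWD"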
|
|
@@ -0,0 +1,43 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import matplotlib
|
||||
matplotlib.use('Agg')
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import json
|
||||
import sys
|
||||
|
||||
stages_to_counters = {}
|
||||
stages_to_time = {}
|
||||
|
||||
if len(sys.argv) != 2:
|
||||
print(f"USAGE: {sys.argv[0]} <input file>")
|
||||
sys.exit(1)
|
||||
|
||||
with open(sys.argv[1]) as fh:
|
||||
for line in fh.readlines():
|
||||
if "COUNTER" in line:
|
||||
json_part = line[line.find("{"):]
|
||||
x = json.loads(json_part)
|
||||
counter = x['name']
|
||||
if not (counter in stages_to_counters):
|
||||
stages_to_counters[counter] = []
|
||||
stages_to_time[counter] = []
|
||||
stages_to_counters[counter].append(x['counts'])
|
||||
stages_to_time[counter].append(x['now'])
|
||||
|
||||
fig, ax = plt.subplots()
|
||||
|
||||
for stage in stages_to_counters.keys():
|
||||
plt.plot(stages_to_time[stage], stages_to_counters[stage], label=stage)
|
||||
|
||||
plt.xlabel('ms')
|
||||
plt.ylabel('count')
|
||||
|
||||
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
|
||||
ncol=2, mode="expand", borderaxespad=0.)
|
||||
|
||||
plt.locator_params(axis='x', nbins=10)
|
||||
plt.grid(True)
|
||||
|
||||
plt.savefig("perf.pdf")
|
|
@@ -0,0 +1,70 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
stages_data = {}
|
||||
|
||||
if len(sys.argv) != 2:
|
||||
print("USAGE: {} <input file>".format(sys.argv[0]))
|
||||
sys.exit(1)
|
||||
|
||||
with open(sys.argv[1]) as fh:
|
||||
for line in fh.readlines():
|
||||
if "COUNTER" in line:
|
||||
json_part = line[line.find("{"):]
|
||||
x = json.loads(json_part)
|
||||
counter = x['name']
|
||||
if not (counter in stages_data):
|
||||
stages_data[counter] = {'first_ts': x['now'], 'last_ts': x['now'], 'last_count': 0,
|
||||
'data': [], 'max_speed': 0, 'min_speed': 9999999999.0,
|
||||
'count': 0,
|
||||
'max_speed_ts': 0, 'min_speed_ts': 0}
|
||||
stages_data[counter]['count'] += 1
|
||||
count_since_last = x['counts'] - stages_data[counter]['last_count']
|
||||
time_since_last = float(x['now'] - stages_data[counter]['last_ts'])
|
||||
if time_since_last > 1:
|
||||
speed = 1000.0 * (count_since_last / time_since_last)
|
||||
stages_data[counter]['data'].append(speed)
|
||||
if speed > stages_data[counter]['max_speed']:
|
||||
stages_data[counter]['max_speed'] = speed
|
||||
stages_data[counter]['max_speed_ts'] = x['now']
|
||||
if speed < stages_data[counter]['min_speed']:
|
||||
stages_data[counter]['min_speed'] = speed
|
||||
stages_data[counter]['min_speed_ts'] = x['now']
|
||||
stages_data[counter]['last_ts'] = x['now']
|
||||
stages_data[counter]['last_count'] = x['counts']
|
||||
|
||||
for stage in stages_data.keys():
|
||||
stages_data[stage]['data'].sort()
|
||||
#mean_index = stages_data[stage]['count'] / 2
|
||||
mean = 0
|
||||
average = 0
|
||||
eightieth = 0
|
||||
data_len = len(stages_data[stage]['data'])
|
||||
mean_index = int(data_len / 2)
|
||||
eightieth_index = int(data_len * 0.8)
|
||||
#print("mean idx: {} data.len: {}".format(mean_index, data_len))
|
||||
if data_len > 0:
|
||||
mean = stages_data[stage]['data'][mean_index]
|
||||
average = float(sum(stages_data[stage]['data'])) / data_len
|
||||
eightieth = stages_data[stage]['data'][eightieth_index]
|
||||
print("stage: {} max: {:,.2f} min: {:.2f} count: {} total: {} mean: {:,.2f} average: {:,.2f} 80%: {:,.2f}".format(stage,
|
||||
stages_data[stage]['max_speed'],
|
||||
stages_data[stage]['min_speed'],
|
||||
stages_data[stage]['count'],
|
||||
stages_data[stage]['last_count'],
|
||||
mean, average, eightieth))
|
||||
num = 5
|
||||
idx = -1
|
||||
if data_len >= num:
|
||||
print(" top {}: ".format(num), end='')
|
||||
for x in range(0, num):
|
||||
print("{:,.2f} ".format(stages_data[stage]['data'][idx]), end='')
|
||||
idx -= 1
|
||||
if stages_data[stage]['data'][idx] < average:
|
||||
break
|
||||
print("")
|
||||
print(" max_ts: {} min_ts: {}".format(stages_data[stage]['max_speed_ts'], stages_data[stage]['min_speed_ts']))
|
||||
print("\n")
|
||||
|
|
@@ -0,0 +1,14 @@
|
|||
# source this file
|
||||
|
||||
readCargoVariable() {
|
||||
declare variable="$1"
|
||||
declare Cargo_toml="$2"
|
||||
|
||||
while read -r name equals value _; do
|
||||
if [[ $name = "$variable" && $equals = = ]]; then
|
||||
echo "${value//\"/}"
|
||||
return
|
||||
fi
|
||||
done < <(cat "$Cargo_toml")
|
||||
echo "Unable to locate $variable in $Cargo_toml" 1>&2
|
||||
}
|
|
@@ -0,0 +1,123 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Run a minimal Solana cluster. Ctrl-C to exit.
|
||||
#
|
||||
# Before running this script ensure standard Solana programs are available
|
||||
# in the PATH, or that `cargo build` ran successfully
|
||||
#
|
||||
set -e
|
||||
|
||||
# Prefer possible `cargo build` binaries over PATH binaries
|
||||
script_dir="$(readlink -f "$(dirname "$0")")"
|
||||
if [[ "$script_dir" =~ /scripts$ ]]; then
|
||||
cd "$script_dir/.."
|
||||
else
|
||||
cd "$script_dir"
|
||||
fi
|
||||
|
||||
|
||||
profile=debug
|
||||
if [[ -n $NDEBUG ]]; then
|
||||
profile=release
|
||||
fi
|
||||
PATH=$PWD/target/$profile:$PATH
|
||||
|
||||
ok=true
|
||||
for program in solana-{faucet,genesis,keygen,validator}; do
|
||||
$program -V || ok=false
|
||||
done
|
||||
$ok || {
|
||||
echo
|
||||
echo "Unable to locate required programs. Try building them first with:"
|
||||
echo
|
||||
echo " $ cargo build --all"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
export RUST_LOG=${RUST_LOG:-solana=info,solana_runtime::message_processor=debug} # if RUST_LOG is unset, default to info
|
||||
export RUST_BACKTRACE=1
|
||||
dataDir=$PWD/config/"$(basename "$0" .sh)"
|
||||
ledgerDir=$PWD/config/ledger
|
||||
|
||||
SOLANA_RUN_SH_CLUSTER_TYPE=${SOLANA_RUN_SH_CLUSTER_TYPE:-development}
|
||||
|
||||
set -x
|
||||
if ! solana address; then
|
||||
echo Generating default keypair
|
||||
solana-keygen new --no-passphrase
|
||||
fi
|
||||
validator_identity="$dataDir/validator-identity.json"
|
||||
if [[ -e $validator_identity ]]; then
|
||||
echo "Use existing validator keypair"
|
||||
else
|
||||
solana-keygen new --no-passphrase -so "$validator_identity"
|
||||
fi
|
||||
validator_vote_account="$dataDir/validator-vote-account.json"
|
||||
if [[ -e $validator_vote_account ]]; then
|
||||
echo "Use existing validator vote account keypair"
|
||||
else
|
||||
solana-keygen new --no-passphrase -so "$validator_vote_account"
|
||||
fi
|
||||
validator_stake_account="$dataDir/validator-stake-account.json"
|
||||
if [[ -e $validator_stake_account ]]; then
|
||||
echo "Use existing validator stake account keypair"
|
||||
else
|
||||
solana-keygen new --no-passphrase -so "$validator_stake_account"
|
||||
fi
|
||||
|
||||
if [[ -e "$ledgerDir"/genesis.bin || -e "$ledgerDir"/genesis.tar.bz2 ]]; then
|
||||
echo "Use existing genesis"
|
||||
else
|
||||
./fetch-spl.sh
|
||||
if [[ -r spl-genesis-args.sh ]]; then
|
||||
SPL_GENESIS_ARGS=$(cat spl-genesis-args.sh)
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
solana-genesis \
|
||||
--hashes-per-tick sleep \
|
||||
--faucet-lamports 500000000000000000 \
|
||||
--bootstrap-validator \
|
||||
"$validator_identity" \
|
||||
"$validator_vote_account" \
|
||||
"$validator_stake_account" \
|
||||
--ledger "$ledgerDir" \
|
||||
--cluster-type "$SOLANA_RUN_SH_CLUSTER_TYPE" \
|
||||
$SPL_GENESIS_ARGS \
|
||||
$SOLANA_RUN_SH_GENESIS_ARGS
|
||||
fi
|
||||
|
||||
abort() {
|
||||
set +e
|
||||
kill "$faucet" "$validator"
|
||||
wait "$validator"
|
||||
}
|
||||
trap abort INT TERM EXIT
|
||||
|
||||
solana-faucet &
|
||||
faucet=$!
|
||||
|
||||
args=(
|
||||
--identity "$validator_identity"
|
||||
--vote-account "$validator_vote_account"
|
||||
--ledger "$ledgerDir"
|
||||
--gossip-port 8001
|
||||
--full-rpc-api
|
||||
--rpc-port 8899
|
||||
--log validator.log
|
||||
--rpc-faucet-address 127.0.0.1:9900
|
||||
--enable-rpc-transaction-history
|
||||
--enable-extended-tx-metadata-storage
|
||||
--init-complete-file "$dataDir"/init-completed
|
||||
--snapshot-compression none
|
||||
--require-tower
|
||||
--no-wait-for-vote-to-start-leader
|
||||
--no-os-network-limits-test
|
||||
--rpc-pubsub-enable-block-subscription
|
||||
)
|
||||
# shellcheck disable=SC2086
|
||||
solana-validator "${args[@]}" $SOLANA_RUN_SH_VALIDATOR_ARGS &
|
||||
validator=$!
|
||||
|
||||
wait "$validator"
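
The script honors a few environment overrides (NDEBUG for the release profile, SOLANA_RUN_SH_CLUSTER_TYPE, SOLANA_RUN_SH_GENESIS_ARGS, SOLANA_RUN_SH_VALIDATOR_ARGS); a hedged example with an extra validator flag passed through:

  # release-profile cluster with an additional flag handed to solana-validator (flag illustrative)
  NDEBUG=1 SOLANA_RUN_SH_CLUSTER_TYPE=development \
    SOLANA_RUN_SH_VALIDATOR_ARGS="--limit-ledger-size 50000000" \
    ./scripts/run.sh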
|
|
@@ -0,0 +1,26 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
# rust-analyzer doesn't support hiding noisy test calls in the call hierarchy from tests/benches
|
||||
# so, here's some wild hack from ryoqun!
|
||||
|
||||
if [[ $1 = "doit" ]]; then
|
||||
# it's true that we put true just for truly-aligned lines
|
||||
# shellcheck disable=SC2046 # our rust files are sanely named with no need to escape
|
||||
true &&
|
||||
sed -i -e 's/#\[cfg(test)\]/#[cfg(escaped_cfg_test)]/g' $(git ls-files :**.rs :^**/build.rs) &&
|
||||
sed -i -e 's/#\[bench\]/#[cfg(escaped_bench)]/g' $(git ls-files :**.rs :^**/build.rs) &&
|
||||
sed -i -e 's/#\[test\]/#[cfg(escaped_test)]/g' $(git ls-files :**.rs :^**/build.rs) &&
|
||||
sed -i -e 's/#\[tokio::test\]/#[cfg(escaped_tokio_test)]/g' $(git ls-files :**.rs :^**/build.rs)
|
||||
elif [[ $1 = "undoit" ]]; then
|
||||
# shellcheck disable=SC2046 # our rust files are sanely named with no need to escape
|
||||
true &&
|
||||
sed -i -e 's/#\[cfg(escaped_cfg_test)\]/#[cfg(test)]/g' $(git ls-files :**.rs :^**/build.rs) &&
|
||||
sed -i -e 's/#\[cfg(escaped_bench)\]/#[bench]/g' $(git ls-files :**.rs :^**/build.rs) &&
|
||||
sed -i -e 's/#\[cfg(escaped_test)\]/#[test]/g' $(git ls-files :**.rs :^**/build.rs) &&
|
||||
sed -i -e 's/#\[cfg(escaped_tokio_test)\]/#[tokio::test]/g' $(git ls-files :**.rs :^**/build.rs)
|
||||
else
|
||||
echo "usage: $0 [doit|undoit]" > /dev/stderr
|
||||
exit 1
|
||||
fi
|
|
@@ -0,0 +1,79 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Convenience script to easily deploy a software update to a testnet
|
||||
#
|
||||
set -e
|
||||
SOLANA_ROOT="$(cd "$(dirname "$0")"/..; pwd)"
|
||||
|
||||
maybeKeypair=
|
||||
while [[ ${1:0:2} = -- ]]; do
|
||||
if [[ $1 = --keypair && -n $2 ]]; then
|
||||
maybeKeypair="$1 $2"
|
||||
shift 2
|
||||
else
|
||||
echo "Error: Unknown option: $1"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
URL=$1
|
||||
TAG=$2
|
||||
OS=${3:-linux}
|
||||
|
||||
if [[ -z $URL || -z $TAG ]]; then
|
||||
echo "Usage: $0 [stable|localhost|RPC URL] [edge|beta|release tag] [linux|osx|windows]"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ ! -f update_manifest_keypair.json ]]; then
|
||||
"$SOLANA_ROOT"/scripts/solana-install-update-manifest-keypair.sh "$OS"
|
||||
fi
|
||||
|
||||
case "$OS" in
|
||||
osx)
|
||||
TARGET=x86_64-apple-darwin
|
||||
;;
|
||||
linux)
|
||||
TARGET=x86_64-unknown-linux-gnu
|
||||
;;
|
||||
windows)
|
||||
TARGET=x86_64-pc-windows-msvc
|
||||
;;
|
||||
*)
|
||||
TARGET=unknown-unknown-unknown
|
||||
;;
|
||||
esac
|
||||
|
||||
case $URL in
|
||||
stable)
|
||||
URL=http://api.devnet.solana.com
|
||||
;;
|
||||
localhost)
|
||||
URL=http://localhost:8899
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
|
||||
case $TAG in
|
||||
edge|beta)
|
||||
DOWNLOAD_URL=https://release.solana.com/"$TAG"/solana-release-$TARGET.tar.bz2
|
||||
;;
|
||||
*)
|
||||
DOWNLOAD_URL=https://github.com/solana-labs/solana/releases/download/"$TAG"/solana-release-$TARGET.tar.bz2
|
||||
;;
|
||||
esac
|
||||
|
||||
# Prefer possible `cargo build` binaries over PATH binaries
|
||||
PATH="$SOLANA_ROOT"/target/debug:$PATH
|
||||
|
||||
set -x
|
||||
# shellcheck disable=SC2086 # Don't want to double quote $maybeKeypair
|
||||
balance=$(solana $maybeKeypair --url "$URL" balance --lamports)
|
||||
if [[ $balance = "0 lamports" ]]; then
|
||||
# shellcheck disable=SC2086 # Don't want to double quote $maybeKeypair
|
||||
solana $maybeKeypair --url "$URL" airdrop 0.000000042
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2086 # Don't want to double quote $maybeKeypair
|
||||
solana-install deploy $maybeKeypair --url "$URL" "$DOWNLOAD_URL" update_manifest_keypair.json
|
|
@@ -0,0 +1,34 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Creates update_manifest_keypair.json based on the current platform and
|
||||
# environment
|
||||
#
|
||||
set -e
|
||||
|
||||
OS=${1:-linux}
|
||||
|
||||
case "$OS" in
|
||||
osx)
|
||||
TARGET=$(uname -m)-apple-darwin
|
||||
;;
|
||||
linux)
|
||||
TARGET=x86_64-unknown-linux-gnu
|
||||
;;
|
||||
windows)
|
||||
TARGET=x86_64-pc-windows-msvc
|
||||
;;
|
||||
*)
|
||||
TARGET=unknown-unknown-unknown
|
||||
;;
|
||||
esac
|
||||
|
||||
SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR="SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR_${TARGET//-/_}"
|
||||
|
||||
# shellcheck disable=2154 # is referenced but not assigned
|
||||
if [[ -z ${!SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR} ]]; then
|
||||
echo "$SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR not defined"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "${!SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR}" > update_manifest_keypair.json
|
||||
ls -l update_manifest_keypair.json
|
|
@@ -0,0 +1,45 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# Reports cpu and ram usage statistics
|
||||
#
|
||||
set -e
|
||||
|
||||
[[ $(uname) == Linux ]] || exit 0
|
||||
|
||||
# need to cd like this to avoid #SC1091
|
||||
cd "$(dirname "$0")/.."
|
||||
source scripts/configure-metrics.sh
|
||||
|
||||
while true; do
|
||||
# collect top twice because the first time is inaccurate
|
||||
top_output="$(top -bn2 -d1)"
|
||||
# collect the total cpu usage by subtracting idle usage from 100%
|
||||
cpu_usage=$(echo "${top_output}" | grep '%Cpu(s):' | sed "s/.*, *\([0-9.]*\)%* id.*/\1/" | tail -1 | awk '{print 100 - $1}')
|
||||
# collect the total ram usage by dividing used memory / total memory
|
||||
ram_total_and_usage=$(echo "${top_output}" | grep '.*B Mem'| tail -1 | sed "s/.*: *\([0-9.]*\)%* total.*, *\([0-9.]*\)%* used.*/\1 \2/")
|
||||
read -r total used <<< "$ram_total_and_usage"
|
||||
ram_usage=$(awk "BEGIN {print $used / $total * 100}")
|
||||
cpu_report="cpu_usage=$cpu_usage,ram_usage=$ram_usage"
|
||||
|
||||
# if nvidia-smi exists, report gpu stats
|
||||
gpu_report=""
|
||||
if [ -x "$(command -v nvidia-smi)" ]; then
|
||||
mapfile -t individual_gpu_usage < <(nvidia-smi --query-gpu=utilization.gpu,memory.used,memory.total --format=csv,nounits,noheader)
|
||||
total_gpu_usage=0
|
||||
total_gpu_mem_usage=0
|
||||
num_gpus=${#individual_gpu_usage[@]}
|
||||
for entry in "${individual_gpu_usage[@]}"
|
||||
do
|
||||
read -r compute mem_used mem_total <<< "${entry//,/}"
|
||||
total_gpu_usage=$(awk "BEGIN {print $total_gpu_usage + $compute }")
|
||||
total_gpu_mem_usage=$(awk "BEGIN {print $total_gpu_mem_usage + $mem_used / $mem_total * 100}")
|
||||
done
|
||||
avg_gpu_usage=$(awk "BEGIN {print $total_gpu_usage / $num_gpus}")
|
||||
avg_gpu_mem_usage=$(awk "BEGIN {print $total_gpu_mem_usage / $num_gpus}")
|
||||
gpu_report=",avg_gpu_usage=$avg_gpu_usage,avg_gpu_mem_usage=$avg_gpu_mem_usage"
|
||||
fi
|
||||
|
||||
report="${cpu_report}${gpu_report}"
|
||||
./scripts/metrics-write-datapoint.sh "system-stats,hostname=$HOSTNAME $report"
|
||||
sleep 1
|
||||
done
|
|
@@ -0,0 +1,15 @@
|
|||
# |source| this file
|
||||
#
|
||||
# Adjust the maximum number of files that may be opened to as large as possible.
|
||||
#
|
||||
|
||||
maxOpenFds=65000
|
||||
|
||||
if [[ $(ulimit -n) -lt $maxOpenFds ]]; then
|
||||
ulimit -n $maxOpenFds 2>/dev/null || {
|
||||
echo "Error: nofiles too small: $(ulimit -n). Failed to run \"ulimit -n $maxOpenFds\"";
|
||||
if [[ $(uname) = Darwin ]]; then
|
||||
echo "Try running |sudo launchctl limit maxfiles 65536 200000| first"
|
||||
fi
|
||||
}
|
||||
fi
|
|
@@ -0,0 +1,48 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# solana-cli integration sanity test
|
||||
#
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")"/..
|
||||
|
||||
# shellcheck source=multinode-demo/common.sh
|
||||
source multinode-demo/common.sh
|
||||
|
||||
if [[ -z $1 ]]; then # no network argument, use localhost by default
|
||||
args=(--url http://127.0.0.1:8899)
|
||||
else
|
||||
args=("$@")
|
||||
fi
|
||||
|
||||
args+=(--keypair "$SOLANA_CONFIG_DIR"/faucet.json)
|
||||
|
||||
node_readiness=false
|
||||
timeout=60
|
||||
while [[ $timeout -gt 0 ]]; do
|
||||
set +e
|
||||
output=$($solana_cli "${args[@]}" transaction-count --commitment finalized)
|
||||
rc=$?
|
||||
set -e
|
||||
if [[ $rc -eq 0 && -n $output ]]; then
|
||||
node_readiness=true
|
||||
break
|
||||
fi
|
||||
sleep 2
|
||||
(( timeout=timeout-2 ))
|
||||
done
|
||||
if ! "$node_readiness"; then
|
||||
echo "Timed out waiting for cluster to start"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
(
|
||||
set -x
|
||||
$solana_cli "${args[@]}" address
|
||||
$solana_cli "${args[@]}" balance
|
||||
$solana_cli "${args[@]}" ping --count 5 --interval 0
|
||||
$solana_cli "${args[@]}" balance
|
||||
)
|
||||
|
||||
echo PASS
|
||||
exit 0