Minor repairs and fix BIP39 and add tests (#310)

* Log plot number and number of proofs found, and fix an RPC edge case
* Revert add -> append, and python black
* Fix list/set issue
* Correct BIP39 spec and test vectors
* Fix create mnemonic UI
Mariano Sorgente 2020-07-13 20:59:17 -07:00 committed by Gene Hoffman
parent ca4c6ae1a8
commit 550642c789
14 changed files with 295 additions and 60 deletions

View File

@ -94,7 +94,7 @@ export const incomingReducer = (state = { ...initial_state }, action) => {
let success, id, wallet, wallets;
if (command === "generate_mnemonic") {
var mnemonic_data = message.data.mnemonic;
return { ...state, mnemonic: mnemonic_data };
return { ...state, mnemonic: mnemonic_data.split(" ") };
} else if (command === "add_key") {
success = data.success;
return { ...state, logged_in: success };

View File

@ -4,7 +4,6 @@ from src.util.keychain import (
generate_mnemonic,
bytes_to_mnemonic,
Keychain,
bytes_from_mnemonic,
)
from src.wallet.derive_keys import (
master_sk_to_pool_sk,
@ -102,9 +101,8 @@ def add_private_key_seed(mnemonic):
"""
try:
entropy = bytes_from_mnemonic(mnemonic)
passphrase = ""
sk = keychain.add_private_key(entropy, passphrase)
sk = keychain.add_private_key(mnemonic, passphrase)
fingerprint = sk.get_g1().get_fingerprint()
print(
f"Added private key with public key fingerprint {fingerprint} and mnemonic"
@ -116,15 +114,16 @@ def add_private_key_seed(mnemonic):
return
def mnemonic_to_string(mnemonic):
def mnemonic_to_string(mnemonic_str):
"""
Converts a mnemonic to a user-readable string in the terminal.
"""
mnemonic = mnemonic_str.split()
mnemonics_string = ""
for i in range(0, 24):
for i in range(0, len(mnemonic)):
mnemonics_string += f"{i + 1}) {mnemonic[i]}"
if i != 23:
if i != len(mnemonic) - 1:
mnemonics_string += ", "
if (i + 1) % 6 == 0:
mnemonics_string += "\n"
@ -205,7 +204,7 @@ def handler(args, parser):
elif command == "show":
show_all_keys()
elif command == "add":
add_private_key_seed(args.mnemonic)
add_private_key_seed(" ".join(args.mnemonic))
check_keys(root_path)
elif command == "delete":
delete(args)
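
With this change the CLI handles mnemonics as a single space-separated string: the add command joins the words from argparse before calling add_private_key_seed, and mnemonic_to_string splits them back for display. A minimal standalone sketch of the display logic above, using a 12-word sample mnemonic from the test vectors for illustration:

def mnemonic_to_string(mnemonic_str):
    # Numbered, comma-separated words, six per line.
    mnemonic = mnemonic_str.split()
    out = ""
    for i in range(len(mnemonic)):
        out += f"{i + 1}) {mnemonic[i]}"
        if i != len(mnemonic) - 1:
            out += ", "
        if (i + 1) % 6 == 0:
            out += "\n"
    return out

print(mnemonic_to_string("legal winner thank year wave sausage worth useful legal winner thank yellow"))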

View File

@ -241,15 +241,18 @@ class Harvester:
awaitables.append(lookup_challenge(filename, plot_info.prover))
# Concurrently executes all disk lookups to take advantage of parallelism across multiple disks
total_proofs_found = 0
for sublist_awaitable in asyncio.as_completed(awaitables):
for response in await sublist_awaitable:
total_proofs_found += 1
yield OutboundMessage(
NodeType.FARMER,
Message("challenge_response", response),
Delivery.RESPOND,
)
log.info(
f"{len(awaitables)} plots were eligible for farming for this challenge, time: {time.time() - start}. "
f"{len(awaitables)} plots were eligible for farming {new_challenge.challenge_hash.hex()[:10]}..."
f" Found {total_proofs_found} proofs. Time: {time.time() - start}. "
f"Total {len(self.provers)} plots"
)
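
The loop above counts proofs as each disk lookup resolves instead of waiting for all of them to finish. A small self-contained sketch of that asyncio.as_completed pattern; lookup and its sleep are hypothetical stand-ins for the real proof-of-space disk lookups:

import asyncio

async def lookup(plot_id: int):
    # Stand-in for a disk lookup; returns a (possibly empty) list of proofs.
    await asyncio.sleep(0.01 * plot_id)
    return [f"proof-{plot_id}"] if plot_id % 2 == 0 else []

async def main():
    awaitables = [lookup(i) for i in range(5)]
    total_proofs_found = 0
    # Handle each result as soon as it completes, not in submission order.
    for sublist_awaitable in asyncio.as_completed(awaitables):
        for _proof in await sublist_awaitable:
            total_proofs_found += 1
    print(f"{len(awaitables)} plots eligible, found {total_proofs_found} proofs")

asyncio.run(main())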

View File

@ -113,7 +113,7 @@ def create_plots(
config = add_plot_directory(str(args.final_dir.resolve()), root_path)
if not full_path.exists():
log.info(f"Starting plot {num}")
log.info(f"Starting plot {i + 1}/{num}")
# Creates the plot. This will take a long time for larger plots.
plotter: DiskPlotter = DiskPlotter()
plotter.create_plot_disk(

View File

@ -88,7 +88,7 @@ def load_plots(
) -> Tuple[bool, Dict[Path, PlotInfo], Set[Path], Set[Path]]:
config_file = load_config(root_path, "config.yaml", "harvester")
changed = False
no_key_filenames: Set[Path] = []
no_key_filenames: Set[Path] = set()
log.info(f'Searching directories {config_file["plot_directories"]}')
plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(config_file)
@ -121,7 +121,7 @@ def load_plots(
log.warning(
f"Plot {filename} has a farmer public key that is not in the farmer's pk list."
)
no_key_filenames.append(filename)
no_key_filenames.add(filename)
if not open_no_key_filenames:
continue
@ -132,7 +132,7 @@ def load_plots(
log.warning(
f"Plot {filename} has a pool public key that is not in the farmer's pool pk list."
)
no_key_filenames.append(filename)
no_key_filenames.add(filename)
if not open_no_key_filenames:
continue

View File

@ -77,7 +77,7 @@ class FullNodeRpcApi:
sync_tip_height = 0
sync_progress_height = uint32(0)
if lca.height >= 1:
if lca.height > 1:
newer_block_hex = lca.header_hash.hex()
older_block_hex = self.service.blockchain.height_to_hash[
max(1, lca.height - 100)

View File

@ -7,7 +7,6 @@ from typing import List, Optional, Tuple, Dict, Callable
from src.util.byte_types import hexstr_to_bytes
from src.util.keychain import (
bytes_from_mnemonic,
generate_mnemonic,
bytes_to_mnemonic,
)
@ -373,7 +372,9 @@ class WalletRpcApi:
wallet: CCWallet = self.service.wallet_state_manager.wallets[wallet_id]
puzzle_hash = hexstr_to_bytes(request["innerpuzhash"])
try:
tx = await wallet.generate_signed_transaction(request["amount"], puzzle_hash)
tx = await wallet.generate_signed_transaction(
request["amount"], puzzle_hash
)
except Exception as e:
data = {
"status": "FAILED",
@ -540,9 +541,8 @@ class WalletRpcApi:
if "mnemonic" in request:
# Adding a key from 24 word mnemonic
mnemonic = request["mnemonic"]
entropy = bytes_from_mnemonic(mnemonic)
passphrase = ""
sk = self.service.keychain.add_private_key(entropy, passphrase)
sk = self.service.keychain.add_private_key(" ".join(mnemonic), passphrase)
else:
return {"success": False}

View File

@ -33,7 +33,7 @@ from src.types.proof_of_space import ProofOfSpace
from src.types.proof_of_time import ProofOfTime
from src.types.pool_target import PoolTarget
from src.types.sized_bytes import bytes32
from src.util.keychain import Keychain
from src.util.keychain import Keychain, bytes_to_mnemonic
from src.util.merkle_set import MerkleSet
from src.util.ints import uint8, uint32, uint64, uint128, int512
from src.util.hash import std_hash
@ -81,10 +81,10 @@ class BlockTools:
self.keychain = Keychain("testing-1.8.0", True)
self.keychain.delete_all_keys()
self.farmer_master_sk = self.keychain.add_private_key(
b"block_tools farmer key 1.8", ""
bytes_to_mnemonic(std_hash(b"block_tools farmer key")), ""
)
self.pool_master_sk = self.keychain.add_private_key(
b"block_tools pool key 1.8", ""
bytes_to_mnemonic(std_hash(b"block_tools pool key")), ""
)
self.farmer_pk = master_sk_to_farmer_sk(self.farmer_master_sk).get_g1()
self.pool_pk = master_sk_to_pool_sk(self.pool_master_sk).get_g1()
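
BlockTools now derives its test keys from mnemonics rather than raw byte strings: hashing a fixed label gives 32 deterministic bytes, and bytes_to_mnemonic turns those into a valid 24-word mnemonic, so the same farmer and pool keys are regenerated on every run. A sketch of the idea, assuming std_hash is SHA-256 (an assumption for illustration):

import hashlib

entropy = hashlib.sha256(b"block_tools farmer key").digest()  # assumed equivalent of std_hash
assert len(entropy) == 32  # a valid BIP39 entropy length, i.e. a 24-word mnemonic
# bytes_to_mnemonic(entropy) is then accepted by Keychain.add_private_key.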

View File

@ -18,5 +18,11 @@ def run_program(
pre_eval_f=None,
):
return default_run_program(
program, args, quote_kw, args_kw, operator_lookup, max_cost, pre_eval_f=pre_eval_f
program,
args,
quote_kw,
args_kw,
operator_lookup,
max_cost,
pre_eval_f=pre_eval_f,
)

View File

@ -35,24 +35,27 @@ def bip39_word_list() -> str:
return pkg_resources.resource_string(__name__, "english.txt").decode()
def generate_mnemonic() -> List[str]:
def generate_mnemonic() -> str:
mnemonic_bytes = token_bytes(32)
mnemonic = bytes_to_mnemonic(mnemonic_bytes)
return mnemonic
def bytes_to_mnemonic(mnemonic_bytes: bytes):
seed_array = bytearray(mnemonic_bytes)
if len(mnemonic_bytes) not in [16, 20, 24, 28, 32]:
raise ValueError(
f"Data length should be one of the following: [16, 20, 24, 28, 32], but it is {len(mnemonic_bytes)}."
)
word_list = bip39_word_list().splitlines()
CS = len(mnemonic_bytes) // 4
checksum = bytes(std_hash(mnemonic_bytes))
checksum = BitArray(bytes(std_hash(mnemonic_bytes)))[:CS]
seed_array.append(checksum[0])
bytes_for_mnemonic = bytes(seed_array)
bitarray = BitArray(bytes_for_mnemonic)
bitarray = BitArray(mnemonic_bytes) + checksum
mnemonics = []
assert len(bitarray) % 11 == 0
for i in range(0, 24):
for i in range(0, len(bitarray) // 11):
start = i * 11
end = start + 11
bits = bitarray[start:end]
@ -60,35 +63,46 @@ def bytes_to_mnemonic(mnemonic_bytes: bytes):
m_word = word_list[m_word_poition]
mnemonics.append(m_word)
return mnemonics
return " ".join(mnemonics)
def bytes_from_mnemonic(mnemonic: List[str]):
def bytes_from_mnemonic(mnemonic_str: str):
mnemonic: List[str] = mnemonic_str.split(" ")
if len(mnemonic) not in [12, 15, 18, 21, 24]:
raise ValueError("Invalid mnemonic length")
word_list = {word: i for i, word in enumerate(bip39_word_list().splitlines())}
bit_array = BitArray()
for i in range(0, 24):
for i in range(0, len(mnemonic)):
word = mnemonic[i]
value = word_list[word]
bit_array.append(BitArray(uint=value, length=11))
all_bytes = bit_array.bytes
entropy_bytes = all_bytes[:32]
checksum_bytes = all_bytes[32]
checksum = std_hash(entropy_bytes)
CS: int = len(mnemonic) // 3
ENT: int = len(mnemonic) * 11 - CS
assert len(bit_array) == len(mnemonic) * 11
assert ENT % 32 == 0
if checksum[0] != checksum_bytes:
entropy_bytes = bit_array[:ENT].bytes
checksum_bytes = bit_array[ENT:]
checksum = BitArray(std_hash(entropy_bytes))[:CS]
assert len(checksum_bytes) == CS
if checksum != checksum_bytes:
raise ValueError("Invalid order of mnemonic words")
return entropy_bytes
def entropy_to_seed(entropy: bytes, passphrase):
def mnemonic_to_seed(mnemonic: str, passphrase):
"""
Uses the BIP39 standard to derive a seed from the mnemonic and passphrase.
"""
salt_str: str = "mnemonic" + passphrase
salt = unicodedata.normalize("NFKD", salt_str).encode("utf-8")
seed = pbkdf2_hmac("sha512", entropy, salt, 2048)
mnemonic_normalized = unicodedata.normalize("NFKD", mnemonic).encode("utf-8")
seed = pbkdf2_hmac("sha512", mnemonic_normalized, salt, 2048)
assert len(seed) == 64
return seed
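
The encoder above appends a checksum of CS = ENT/32 bits, taken from the first bits of the hash of the entropy (assuming std_hash is SHA-256), and slices the ENT || CS concatenation into 11-bit word-list indices; bytes_from_mnemonic reverses this and re-verifies the checksum. A minimal sketch of the index math with plain integers, omitting the word lookup itself:

import hashlib

def entropy_to_word_indices(entropy: bytes) -> list:
    ent_bits = len(entropy) * 8            # ENT: 128, 160, 192, 224 or 256 bits
    cs_bits = ent_bits // 32               # CS: checksum bits (at most 8 here)
    checksum = hashlib.sha256(entropy).digest()[0] >> (8 - cs_bits)
    combined = (int.from_bytes(entropy, "big") << cs_bits) | checksum
    n_words = (ent_bits + cs_bits) // 11   # 12, 15, 18, 21 or 24 words
    return [(combined >> (11 * (n_words - 1 - i))) & 0x7FF for i in range(n_words)]

# All-zero 16-byte entropy (the first test vector below): eleven 0s ("abandon")
# followed by index 3 ("about"), which carries the checksum.
print(entropy_to_word_indices(bytes(16)))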
@ -108,7 +122,7 @@ class Keychain:
testing: bool
user: str
def __init__(self, user: str = "user-1.8.0", testing: bool = False):
def __init__(self, user: str = "user-chia-1.8", testing: bool = False):
self.testing = testing
self.user = user
@ -157,13 +171,14 @@ class Keychain:
return index
index += 1
def add_private_key(self, entropy: bytes, passphrase: str) -> PrivateKey:
def add_private_key(self, mnemonic: str, passphrase: str) -> PrivateKey:
"""
Adds a private key to the keychain from the given mnemonic and passphrase. The
keychain itself will store the public key and the entropy bytes,
but not the passphrase.
"""
seed = entropy_to_seed(entropy, passphrase)
seed = mnemonic_to_seed(mnemonic, passphrase)
entropy = bytes_from_mnemonic(mnemonic)
index = self._get_free_private_key_index()
key = PrivateKey.from_seed(seed)
fingerprint = key.get_g1().get_fingerprint()
@ -191,7 +206,8 @@ class Keychain:
if pkent is not None:
pk, ent = pkent
for pp in passphrases:
seed = entropy_to_seed(ent, pp)
mnemonic = bytes_to_mnemonic(ent)
seed = mnemonic_to_seed(mnemonic, pp)
key = PrivateKey.from_seed(seed)
if key.get_g1() == pk:
return (key, ent)
@ -214,7 +230,8 @@ class Keychain:
if pkent is not None:
pk, ent = pkent
for pp in passphrases:
seed = entropy_to_seed(ent, pp)
mnemonic = bytes_to_mnemonic(ent)
seed = mnemonic_to_seed(mnemonic, pp)
key = PrivateKey.from_seed(seed)
if key.get_g1() == pk:
all_keys.append((key, ent))
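
Seed derivation now runs PBKDF2-HMAC-SHA512 over the NFKD-normalized mnemonic sentence itself (not the entropy), with the salt "mnemonic" + passphrase and 2048 iterations, as BIP39 specifies. A standalone check of that derivation against the first Trezor vector from the JSON file added below:

import unicodedata
from hashlib import pbkdf2_hmac

mnemonic = (
    "abandon abandon abandon abandon abandon abandon "
    "abandon abandon abandon abandon abandon about"
)
passphrase = "TREZOR"
salt = unicodedata.normalize("NFKD", "mnemonic" + passphrase).encode("utf-8")
password = unicodedata.normalize("NFKD", mnemonic).encode("utf-8")
seed = pbkdf2_hmac("sha512", password, salt, 2048)
assert len(seed) == 64
assert seed.hex().startswith("c55257c360c07c72029aebc1b53c05ed")  # matches the first vector below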

View File

@ -447,9 +447,7 @@ class Wallet(AbstractWallet):
output_created = coin
else:
solution = self.make_solution(consumed=[output_created.name()])
list_of_solutions.append(
CoinSolution(coin, Program.to([puzzle, solution]))
)
list_of_solutions.append(CoinSolution(coin, Program.to([puzzle, solution])))
new_sigs = await self.get_sigs_for_innerpuz_with_innersol(puzzle, solution)
sigs = sigs + new_sigs

View File

@ -9,7 +9,7 @@ from src.server.connection import NodeType
from src.server.server import ChiaServer
from src.simulator.full_node_simulator import FullNodeSimulator
from src.timelord_launcher import spawn_process, kill_processes
from src.util.keychain import Keychain
from src.util.keychain import Keychain, bytes_to_mnemonic
from src.wallet.wallet_node import WalletNode
from src.util.config import load_config
from src.harvester import Harvester
@ -150,7 +150,7 @@ async def setup_wallet_node(
entropy = token_bytes(32)
keychain = Keychain(entropy.hex(), True)
keychain.add_private_key(entropy, "")
keychain.add_private_key(bytes_to_mnemonic(entropy), "")
consensus_constants = constants_for_dic(dic)
first_pk = keychain.get_first_public_key()
assert first_pk is not None

View File

@ -0,0 +1,148 @@
{
"english": [
[
"00000000000000000000000000000000",
"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about",
"c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e53495531f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04",
"xprv9s21ZrQH143K3h3fDYiay8mocZ3afhfULfb5GX8kCBdno77K4HiA15Tg23wpbeF1pLfs1c5SPmYHrEpTuuRhxMwvKDwqdKiGJS9XFKzUsAF"
],
[
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"legal winner thank year wave sausage worth useful legal winner thank yellow",
"2e8905819b8723fe2c1d161860e5ee1830318dbf49a83bd451cfb8440c28bd6fa457fe1296106559a3c80937a1c1069be3a3a5bd381ee6260e8d9739fce1f607",
"xprv9s21ZrQH143K2gA81bYFHqU68xz1cX2APaSq5tt6MFSLeXnCKV1RVUJt9FWNTbrrryem4ZckN8k4Ls1H6nwdvDTvnV7zEXs2HgPezuVccsq"
],
[
"80808080808080808080808080808080",
"letter advice cage absurd amount doctor acoustic avoid letter advice cage above",
"d71de856f81a8acc65e6fc851a38d4d7ec216fd0796d0a6827a3ad6ed5511a30fa280f12eb2e47ed2ac03b5c462a0358d18d69fe4f985ec81778c1b370b652a8",
"xprv9s21ZrQH143K2shfP28KM3nr5Ap1SXjz8gc2rAqqMEynmjt6o1qboCDpxckqXavCwdnYds6yBHZGKHv7ef2eTXy461PXUjBFQg6PrwY4Gzq"
],
[
"ffffffffffffffffffffffffffffffff",
"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong",
"ac27495480225222079d7be181583751e86f571027b0497b5b5d11218e0a8a13332572917f0f8e5a589620c6f15b11c61dee327651a14c34e18231052e48c069",
"xprv9s21ZrQH143K2V4oox4M8Zmhi2Fjx5XK4Lf7GKRvPSgydU3mjZuKGCTg7UPiBUD7ydVPvSLtg9hjp7MQTYsW67rZHAXeccqYqrsx8LcXnyd"
],
[
"000000000000000000000000000000000000000000000000",
"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon agent",
"035895f2f481b1b0f01fcf8c289c794660b289981a78f8106447707fdd9666ca06da5a9a565181599b79f53b844d8a71dd9f439c52a3d7b3e8a79c906ac845fa",
"xprv9s21ZrQH143K3mEDrypcZ2usWqFgzKB6jBBx9B6GfC7fu26X6hPRzVjzkqkPvDqp6g5eypdk6cyhGnBngbjeHTe4LsuLG1cCmKJka5SMkmU"
],
[
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will",
"f2b94508732bcbacbcc020faefecfc89feafa6649a5491b8c952cede496c214a0c7b3c392d168748f2d4a612bada0753b52a1c7ac53c1e93abd5c6320b9e95dd",
"xprv9s21ZrQH143K3Lv9MZLj16np5GzLe7tDKQfVusBni7toqJGcnKRtHSxUwbKUyUWiwpK55g1DUSsw76TF1T93VT4gz4wt5RM23pkaQLnvBh7"
],
[
"808080808080808080808080808080808080808080808080",
"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always",
"107d7c02a5aa6f38c58083ff74f04c607c2d2c0ecc55501dadd72d025b751bc27fe913ffb796f841c49b1d33b610cf0e91d3aa239027f5e99fe4ce9e5088cd65",
"xprv9s21ZrQH143K3VPCbxbUtpkh9pRG371UCLDz3BjceqP1jz7XZsQ5EnNkYAEkfeZp62cDNj13ZTEVG1TEro9sZ9grfRmcYWLBhCocViKEJae"
],
[
"ffffffffffffffffffffffffffffffffffffffffffffffff",
"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when",
"0cd6e5d827bb62eb8fc1e262254223817fd068a74b5b449cc2f667c3f1f985a76379b43348d952e2265b4cd129090758b3e3c2c49103b5051aac2eaeb890a528",
"xprv9s21ZrQH143K36Ao5jHRVhFGDbLP6FCx8BEEmpru77ef3bmA928BxsqvVM27WnvvyfWywiFN8K6yToqMaGYfzS6Db1EHAXT5TuyCLBXUfdm"
],
[
"0000000000000000000000000000000000000000000000000000000000000000",
"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art",
"bda85446c68413707090a52022edd26a1c9462295029f2e60cd7c4f2bbd3097170af7a4d73245cafa9c3cca8d561a7c3de6f5d4a10be8ed2a5e608d68f92fcc8",
"xprv9s21ZrQH143K32qBagUJAMU2LsHg3ka7jqMcV98Y7gVeVyNStwYS3U7yVVoDZ4btbRNf4h6ibWpY22iRmXq35qgLs79f312g2kj5539ebPM"
],
[
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title",
"bc09fca1804f7e69da93c2f2028eb238c227f2e9dda30cd63699232578480a4021b146ad717fbb7e451ce9eb835f43620bf5c514db0f8add49f5d121449d3e87",
"xprv9s21ZrQH143K3Y1sd2XVu9wtqxJRvybCfAetjUrMMco6r3v9qZTBeXiBZkS8JxWbcGJZyio8TrZtm6pkbzG8SYt1sxwNLh3Wx7to5pgiVFU"
],
[
"8080808080808080808080808080808080808080808080808080808080808080",
"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless",
"c0c519bd0e91a2ed54357d9d1ebef6f5af218a153624cf4f2da911a0ed8f7a09e2ef61af0aca007096df430022f7a2b6fb91661a9589097069720d015e4e982f",
"xprv9s21ZrQH143K3CSnQNYC3MqAAqHwxeTLhDbhF43A4ss4ciWNmCY9zQGvAKUSqVUf2vPHBTSE1rB2pg4avopqSiLVzXEU8KziNnVPauTqLRo"
],
[
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote",
"dd48c104698c30cfe2b6142103248622fb7bb0ff692eebb00089b32d22484e1613912f0a5b694407be899ffd31ed3992c456cdf60f5d4564b8ba3f05a69890ad",
"xprv9s21ZrQH143K2WFF16X85T2QCpndrGwx6GueB72Zf3AHwHJaknRXNF37ZmDrtHrrLSHvbuRejXcnYxoZKvRquTPyp2JiNG3XcjQyzSEgqCB"
],
[
"9e885d952ad362caeb4efe34a8e91bd2",
"ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic",
"274ddc525802f7c828d8ef7ddbcdc5304e87ac3535913611fbbfa986d0c9e5476c91689f9c8a54fd55bd38606aa6a8595ad213d4c9c9f9aca3fb217069a41028",
"xprv9s21ZrQH143K2oZ9stBYpoaZ2ktHj7jLz7iMqpgg1En8kKFTXJHsjxry1JbKH19YrDTicVwKPehFKTbmaxgVEc5TpHdS1aYhB2s9aFJBeJH"
],
[
"6610b25967cdcca9d59875f5cb50b0ea75433311869e930b",
"gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog",
"628c3827a8823298ee685db84f55caa34b5cc195a778e52d45f59bcf75aba68e4d7590e101dc414bc1bbd5737666fbbef35d1f1903953b66624f910feef245ac",
"xprv9s21ZrQH143K3uT8eQowUjsxrmsA9YUuQQK1RLqFufzybxD6DH6gPY7NjJ5G3EPHjsWDrs9iivSbmvjc9DQJbJGatfa9pv4MZ3wjr8qWPAK"
],
[
"68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c",
"hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length",
"64c87cde7e12ecf6704ab95bb1408bef047c22db4cc7491c4271d170a1b213d20b385bc1588d9c7b38f1b39d415665b8a9030c9ec653d75e65f847d8fc1fc440",
"xprv9s21ZrQH143K2XTAhys3pMNcGn261Fi5Ta2Pw8PwaVPhg3D8DWkzWQwjTJfskj8ofb81i9NP2cUNKxwjueJHHMQAnxtivTA75uUFqPFeWzk"
],
[
"c0ba5a8e914111210f2bd131f3d5e08d",
"scheme spot photo card baby mountain device kick cradle pact join borrow",
"ea725895aaae8d4c1cf682c1bfd2d358d52ed9f0f0591131b559e2724bb234fca05aa9c02c57407e04ee9dc3b454aa63fbff483a8b11de949624b9f1831a9612",
"xprv9s21ZrQH143K3FperxDp8vFsFycKCRcJGAFmcV7umQmcnMZaLtZRt13QJDsoS5F6oYT6BB4sS6zmTmyQAEkJKxJ7yByDNtRe5asP2jFGhT6"
],
[
"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3",
"horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave",
"fd579828af3da1d32544ce4db5c73d53fc8acc4ddb1e3b251a31179cdb71e853c56d2fcb11aed39898ce6c34b10b5382772db8796e52837b54468aeb312cfc3d",
"xprv9s21ZrQH143K3R1SfVZZLtVbXEB9ryVxmVtVMsMwmEyEvgXN6Q84LKkLRmf4ST6QrLeBm3jQsb9gx1uo23TS7vo3vAkZGZz71uuLCcywUkt"
],
[
"9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863",
"panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside",
"72be8e052fc4919d2adf28d5306b5474b0069df35b02303de8c1729c9538dbb6fc2d731d5f832193cd9fb6aeecbc469594a70e3dd50811b5067f3b88b28c3e8d",
"xprv9s21ZrQH143K2WNnKmssvZYM96VAr47iHUQUTUyUXH3sAGNjhJANddnhw3i3y3pBbRAVk5M5qUGFr4rHbEWwXgX4qrvrceifCYQJbbFDems"
],
[
"23db8160a31d3e0dca3688ed941adbf3",
"cat swing flag economy stadium alone churn speed unique patch report train",
"deb5f45449e615feff5640f2e49f933ff51895de3b4381832b3139941c57b59205a42480c52175b6efcffaa58a2503887c1e8b363a707256bdd2b587b46541f5",
"xprv9s21ZrQH143K4G28omGMogEoYgDQuigBo8AFHAGDaJdqQ99QKMQ5J6fYTMfANTJy6xBmhvsNZ1CJzRZ64PWbnTFUn6CDV2FxoMDLXdk95DQ"
],
[
"8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0",
"light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access",
"4cbdff1ca2db800fd61cae72a57475fdc6bab03e441fd63f96dabd1f183ef5b782925f00105f318309a7e9c3ea6967c7801e46c8a58082674c860a37b93eda02",
"xprv9s21ZrQH143K3wtsvY8L2aZyxkiWULZH4vyQE5XkHTXkmx8gHo6RUEfH3Jyr6NwkJhvano7Xb2o6UqFKWHVo5scE31SGDCAUsgVhiUuUDyh"
],
[
"066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad",
"all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform",
"26e975ec644423f4a4c4f4215ef09b4bd7ef924e85d1d17c4cf3f136c2863cf6df0a475045652c57eb5fb41513ca2a2d67722b77e954b4b3fc11f7590449191d",
"xprv9s21ZrQH143K3rEfqSM4QZRVmiMuSWY9wugscmaCjYja3SbUD3KPEB1a7QXJoajyR2T1SiXU7rFVRXMV9XdYVSZe7JoUXdP4SRHTxsT1nzm"
],
[
"f30f8c1da665478f49b001d94c5fc452",
"vessel ladder alter error federal sibling chat ability sun glass valve picture",
"2aaa9242daafcee6aa9d7269f17d4efe271e1b9a529178d7dc139cd18747090bf9d60295d0ce74309a78852a9caadf0af48aae1c6253839624076224374bc63f",
"xprv9s21ZrQH143K2QWV9Wn8Vvs6jbqfF1YbTCdURQW9dLFKDovpKaKrqS3SEWsXCu6ZNky9PSAENg6c9AQYHcg4PjopRGGKmdD313ZHszymnps"
],
[
"c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05",
"scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump",
"7b4a10be9d98e6cba265566db7f136718e1398c71cb581e1b2f464cac1ceedf4f3e274dc270003c670ad8d02c4558b2f8e39edea2775c9e232c7cb798b069e88",
"xprv9s21ZrQH143K4aERa2bq7559eMCCEs2QmmqVjUuzfy5eAeDX4mqZffkYwpzGQRE2YEEeLVRoH4CSHxianrFaVnMN2RYaPUZJhJx8S5j6puX"
],
[
"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f",
"void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold",
"01f5bced59dec48e362f2c45b5de68b9fd6c92c6634f44d6d40aab69056506f0e35524a518034ddc1192e1dacd32c1ed3eaa3c3b131c88ed8e7e54c49a5d0998",
"xprv9s21ZrQH143K39rnQJknpH1WEPFJrzmAqqasiDcVrNuk926oizzJDDQkdiTvNPr2FYDYzWgiMiC63YmfPAa2oPyNB23r2g7d1yiK6WpqaQS"
]
]
}

View File

@ -1,11 +1,13 @@
import unittest
import json
from secrets import token_bytes
from blspy import PrivateKey
from src.util.keychain import (
Keychain,
generate_mnemonic,
bytes_from_mnemonic,
entropy_to_seed,
bytes_to_mnemonic,
mnemonic_to_seed,
)
@ -21,15 +23,15 @@ class TesKeychain(unittest.TestCase):
mnemonic = generate_mnemonic()
entropy = bytes_from_mnemonic(mnemonic)
assert bytes_to_mnemonic(entropy) == mnemonic
mnemonic_2 = generate_mnemonic()
entropy_2 = bytes_from_mnemonic(mnemonic_2)
kc.add_private_key(entropy, "")
kc.add_private_key(mnemonic, "")
assert kc._get_free_private_key_index() == 1
assert len(kc.get_all_private_keys()) == 1
kc.add_private_key(entropy_2, "")
kc.add_private_key(entropy_2, "") # checks to not add duplicates
kc.add_private_key(mnemonic_2, "")
kc.add_private_key(mnemonic_2, "") # checks to not add duplicates
assert kc._get_free_private_key_index() == 2
assert len(kc.get_all_private_keys()) == 2
@ -41,19 +43,19 @@ class TesKeychain(unittest.TestCase):
assert len(kc.get_all_private_keys()) == 2
seed_2 = entropy_to_seed(entropy_2, "")
seed_2 = mnemonic_to_seed(mnemonic, "")
seed_key_2 = PrivateKey.from_seed(seed_2)
kc.delete_key_by_fingerprint(seed_key_2.get_g1().get_fingerprint())
assert kc._get_free_private_key_index() == 1
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 1
kc.delete_all_keys()
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 0
kc.add_private_key(token_bytes(32), "my passphrase")
kc.add_private_key(token_bytes(32), "")
kc.add_private_key(token_bytes(32), "third passphrase")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "my passphrase")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "third passphrase")
assert len(kc.get_all_public_keys()) == 3
assert len(kc.get_all_private_keys()) == 1
@ -73,5 +75,67 @@ class TesKeychain(unittest.TestCase):
assert kc.get_first_public_key() is not None
kc.delete_all_keys()
kc.add_private_key(token_bytes(32), "my passphrase")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "my passphrase")
assert kc.get_first_public_key() is not None
def test_bip39_eip2333_test_vector(self):
kc: Keychain = Keychain(testing=True)
kc.delete_all_keys()
mnemonic = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
passphrase = "TREZOR"
print("mnemonic to seed:", mnemonic_to_seed(mnemonic, passphrase).hex())
master_sk = kc.add_private_key(mnemonic, passphrase)
tv_master_int = (
5399117110774477986698372024995405256382522670366369834617409486544348441851
)
tv_child_int = 11812940737387919040225825939013910852517748782307378293770044673328955938106
assert master_sk == PrivateKey.from_bytes(tv_master_int.to_bytes(32, "big"))
child_sk = master_sk.derive_child(0)
assert child_sk == PrivateKey.from_bytes(tv_child_int.to_bytes(32, "big"))
def test_bip39_test_vectors_trezor(self):
with open("tests/util/bip39_test_vectors.json") as f:
all_vectors = json.loads(f.read())
for vector_list in all_vectors["english"]:
entropy_bytes = bytes.fromhex(vector_list[0])
mnemonic = vector_list[1]
seed = bytes.fromhex(vector_list[2])
assert bytes_from_mnemonic(mnemonic) == entropy_bytes
assert bytes_to_mnemonic(entropy_bytes) == mnemonic
assert mnemonic_to_seed(mnemonic, "TREZOR") == seed
def test_utf8_nfkd(self):
# Test code from trezor:
# Copyright (c) 2013 Pavol Rusnak
# Copyright (c) 2017 mruddy
# https://github.com/trezor/python-mnemonic/blob/master/test_mnemonic.py
# The same sentence in various UTF-8 forms
words_nfkd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a" # noqa: E501
words_nfc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f" # noqa: E501
words_nfkc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f" # noqa: E501
words_nfd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a" # noqa: E501
passphrase_nfkd = (
u"Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
)
passphrase_nfc = (
u"Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
)
passphrase_nfkc = (
u"Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
)
passphrase_nfd = (
u"Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
)
seed_nfkd = mnemonic_to_seed(words_nfkd, passphrase_nfkd)
seed_nfc = mnemonic_to_seed(words_nfc, passphrase_nfc)
seed_nfkc = mnemonic_to_seed(words_nfkc, passphrase_nfkc)
seed_nfd = mnemonic_to_seed(words_nfd, passphrase_nfd)
assert seed_nfkd == seed_nfc
assert seed_nfkd == seed_nfkc
assert seed_nfkd == seed_nfd