bip39/lib.rs

1// Rust Bitcoin Library
2// Written in 2020 by
3//	 Steven Roose <steven@stevenroose.org>
4// To the extent possible under law, the author(s) have dedicated all
5// copyright and related and neighboring rights to this software to
6// the public domain worldwide. This software is distributed without
7// any warranty.
8//
9// You should have received a copy of the CC0 Public Domain Dedication
10// along with this software.
11// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
12//
13
14//! # BIP39 Mnemonic Codes
15//!
16//! Library crate implementing [BIP39](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki)
17//!
18
19#![deny(non_upper_case_globals)]
20#![deny(non_camel_case_types)]
21#![deny(non_snake_case)]
22#![deny(unused_mut)]
23#![deny(dead_code)]
24#![deny(unused_imports)]
25#![deny(missing_docs)]
26#![cfg_attr(all(not(test), not(feature = "std")), no_std)]
27#![cfg_attr(docsrs, feature(doc_auto_cfg))]
28
29#[cfg(any(test, feature = "std"))]
30pub extern crate core;
31
32#[cfg(feature = "alloc")]
33extern crate alloc;
34
35extern crate bitcoin_hashes;
36
37#[cfg(feature = "unicode-normalization")]
38extern crate unicode_normalization;
39
40#[cfg(feature = "rand")]
41pub extern crate crate_rand as rand;
42#[cfg(feature = "rand_core")]
43pub extern crate rand_core;
44#[cfg(feature = "serde")]
45pub extern crate serde;
46
47#[cfg(feature = "alloc")]
48use alloc::borrow::Cow;
49use core::{fmt, str};
50
51#[cfg(all(feature = "alloc", not(feature = "std")))]
52use alloc::{string::ToString, vec::Vec};
53
54/// We support a wide range of dependency versions for `rand` and `rand_core` and not
55/// all versions play nicely together. These re-exports fix that.
56#[cfg(all(feature = "rand", feature = "rand_core"))]
57use rand::{CryptoRng, RngCore};
58#[cfg(all(not(feature = "rand"), feature = "rand_core"))]
59use rand_core::{CryptoRng, RngCore};
60
61#[cfg(feature = "std")]
62use std::error;
63
64use bitcoin_hashes::{sha256, Hash};
65
66#[cfg(feature = "unicode-normalization")]
67use unicode_normalization::UnicodeNormalization;
68
69#[cfg(feature = "zeroize")]
70extern crate zeroize;
71#[cfg(feature = "zeroize")]
72use zeroize::{Zeroize, ZeroizeOnDrop};
73
74#[macro_use]
75mod internal_macros;
76mod language;
77mod pbkdf2;
78
79pub use language::Language;
80
81/// The minimum number of words in a mnemonic.
82#[allow(unused)]
83const MIN_NB_WORDS: usize = 12;
84
85/// The maximum number of words in a mnemonic.
86const MAX_NB_WORDS: usize = 24;
87
88/// The index used to indicate the mnemonic ended.
89const EOF: u16 = u16::max_value();
90
91/// A structure used in the [Error::AmbiguousLanguages] variant that allows iterating
92/// over the possible languages.
93#[derive(Debug, Clone, PartialEq, Eq, Copy)]
94pub struct AmbiguousLanguages([bool; language::MAX_NB_LANGUAGES]);
95
96impl AmbiguousLanguages {
97	/// Presents the possible languages in the form of a slice of booleans
98	/// that correspond to the occurrences in [Language::ALL].
99	pub fn as_bools(&self) -> &[bool; language::MAX_NB_LANGUAGES] {
100		&self.0
101	}
102
103	/// An iterator over the possible languages.
104	pub fn iter(&self) -> impl Iterator<Item = Language> + '_ {
105		Language::ALL.iter().enumerate().filter(move |(i, _)| self.0[*i]).map(|(_, l)| *l)
106	}
107
108	/// Returns a vector of the possible languages.
109	#[cfg(feature = "alloc")]
110	pub fn to_vec(&self) -> Vec<Language> {
111		self.iter().collect()
112	}
113}
114
115/// A BIP39 error.
116#[derive(Debug, Clone, PartialEq, Eq, Copy)]
117pub enum Error {
118	/// Mnemonic has an invalid word count: it must be 12, 15, 18, 21 or 24.
119	BadWordCount(usize),
120	/// Mnemonic contains an unknown word.
121	/// Error contains the index of the word.
122	/// Use `mnemonic.split_whitespace().nth(i)` to get the word.
123	UnknownWord(usize),
124	/// Entropy was not a multiple of 32 bits or was not between 128 and 256 bits in length.
125	BadEntropyBitCount(usize),
126	/// The mnemonic has an invalid checksum.
127	InvalidChecksum,
128	/// The mnemonic can be interpreted as multiple languages.
129	/// Use the helper methods of the inner struct to inspect
130	/// which languages are possible.
131	AmbiguousLanguages(AmbiguousLanguages),
132}
133
134impl fmt::Display for Error {
135	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
136		match *self {
137			Error::BadWordCount(c) => {
138				write!(
139					f,
140					"mnemonic has an invalid word count: {}. Word count must be 12, 15, 18, 21, \
141					or 24",
142					c
143				)
144			}
145			Error::UnknownWord(i) => write!(f, "mnemonic contains an unknown word (word {})", i,),
146			Error::BadEntropyBitCount(c) => write!(
147				f,
148				"entropy was not between 128 and 256 bits or not a multiple of 32 bits: {} bits",
149				c,
150			),
151			Error::InvalidChecksum => write!(f, "the mnemonic has an invalid checksum"),
152			Error::AmbiguousLanguages(a) => {
153				write!(f, "ambiguous word list: ")?;
154				for (i, lang) in a.iter().enumerate() {
155					if i == 0 {
156						write!(f, "{}", lang)?;
157					} else {
158						write!(f, ", {}", lang)?;
159					}
160				}
161				Ok(())
162			}
163		}
164	}
165}
166
167#[cfg(feature = "std")]
168impl error::Error for Error {}
169
170/// A mnemonic code.
171///
172/// The [core::str::FromStr] implementation will try to determine the language of the
173/// mnemonic from all the supported languages. (Languages have to be explicitly enabled using
174/// the Cargo features.)
175///
176/// The supported numbers of words are 12, 15, 18, 21, and 24.
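///
/// For illustration, a small sketch of the [core::str::FromStr] behavior described above,
/// using one of the BIP39 English test vectors:
///
/// ```
/// use bip39::{Language, Mnemonic};
///
/// let m: Mnemonic = "vessel ladder alter error federal sibling chat ability sun glass valve picture"
///     .parse()
///     .unwrap();
/// assert_eq!(m.language(), Language::English);
/// ```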
177#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
178#[cfg_attr(feature = "zeroize", derive(Zeroize, ZeroizeOnDrop))]
179pub struct Mnemonic {
180	/// The language of the mnemonic.
181	lang: Language,
182	/// The indices of the words.
183	/// Mnemonics with fewer than the maximum number of words are terminated with EOF.
184	words: [u16; MAX_NB_WORDS],
185}
186
187#[cfg(feature = "zeroize")]
188impl zeroize::DefaultIsZeroes for Language {}
189
190serde_string_impl!(Mnemonic, "a BIP-39 Mnemonic Code");
191
192impl Mnemonic {
193	/// Ensure the content of the [Cow] is normalized UTF8.
194	/// Performing this on a [Cow] means that all allocations for normalization
195	/// can be avoided for languages without special UTF8 characters.
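	///
	/// A minimal sketch of the "no allocation" fast path (ASCII input is already NFKD,
	/// so the [Cow] stays borrowed):
	///
	/// ```
	/// use std::borrow::Cow;
	/// use bip39::Mnemonic;
	///
	/// let mut cow: Cow<str> = Cow::Borrowed("abandon ability able");
	/// Mnemonic::normalize_utf8_cow(&mut cow);
	/// assert!(matches!(cow, Cow::Borrowed(_)));
	/// ```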
196	#[inline]
197	#[cfg(feature = "unicode-normalization")]
198	pub fn normalize_utf8_cow<'a>(cow: &mut Cow<'a, str>) {
199		let is_nfkd = unicode_normalization::is_nfkd_quick(cow.as_ref().chars());
200		if is_nfkd != unicode_normalization::IsNormalized::Yes {
201			*cow = Cow::Owned(cow.as_ref().nfkd().to_string());
202		}
203	}
204
205	/// Create a new [Mnemonic] in the specified language from the given entropy.
206	/// Entropy must be a multiple of 32 bits (4 bytes) and 128-256 bits in length.
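	///
	/// A minimal sketch (any 16-byte buffer is 128 bits of entropy and yields 12 words;
	/// the all-zero entropy here is for illustration only):
	///
	/// ```
	/// use bip39::{Language, Mnemonic};
	///
	/// let entropy = [0u8; 16];
	/// let m = Mnemonic::from_entropy_in(Language::English, &entropy).unwrap();
	/// assert_eq!(m.word_count(), 12);
	/// ```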
207	pub fn from_entropy_in(language: Language, entropy: &[u8]) -> Result<Mnemonic, Error> {
208		const MAX_ENTROPY_BITS: usize = 256;
209		const MIN_ENTROPY_BITS: usize = 128;
210		const MAX_CHECKSUM_BITS: usize = 8;
211
212		let nb_bytes = entropy.len();
213		let nb_bits = nb_bytes * 8;
214
215		if nb_bits % 32 != 0 {
216			return Err(Error::BadEntropyBitCount(nb_bits));
217		}
218		if nb_bits < MIN_ENTROPY_BITS || nb_bits > MAX_ENTROPY_BITS {
219			return Err(Error::BadEntropyBitCount(nb_bits));
220		}
221
222		let check = sha256::Hash::hash(&entropy);
223		let mut bits = [false; MAX_ENTROPY_BITS + MAX_CHECKSUM_BITS];
224		for i in 0..nb_bytes {
225			for j in 0..8 {
226				bits[i * 8 + j] = (entropy[i] & (1 << (7 - j))) > 0;
227			}
228		}
229		for i in 0..nb_bytes / 4 {
230			bits[8 * nb_bytes + i] = (check[i / 8] & (1 << (7 - (i % 8)))) > 0;
231		}
232
233		let mut words = [EOF; MAX_NB_WORDS];
234		let nb_words = nb_bytes * 3 / 4;
235		for i in 0..nb_words {
236			let mut idx = 0;
237			for j in 0..11 {
238				if bits[i * 11 + j] {
239					idx += 1 << (10 - j);
240				}
241			}
242			words[i] = idx;
243		}
244
245		Ok(Mnemonic {
246			lang: language,
247			words: words,
248		})
249	}
250
251	/// Create a new English [Mnemonic] from the given entropy.
252	/// Entropy must be a multiple of 32 bits (4 bytes) and 128-256 bits in length.
253	pub fn from_entropy(entropy: &[u8]) -> Result<Mnemonic, Error> {
254		Mnemonic::from_entropy_in(Language::English, entropy)
255	}
256
257	/// Generate a new [Mnemonic] in the given language
258	/// with the given randomness source.
259	/// For the different supported word counts, see documentation on [Mnemonic].
260	///
261	/// Example:
262	///
263	/// ```
264	/// use bip39::{Mnemonic, Language};
265	///
266	/// let mut rng = bip39::rand::thread_rng();
267	/// let m = Mnemonic::generate_in_with(&mut rng, Language::English, 24).unwrap();
268	/// ```
269	#[cfg(feature = "rand_core")]
270	pub fn generate_in_with<R>(
271		rng: &mut R,
272		language: Language,
273		word_count: usize,
274	) -> Result<Mnemonic, Error>
275	where
276		R: RngCore + CryptoRng,
277	{
278		if is_invalid_word_count(word_count) {
279			return Err(Error::BadWordCount(word_count));
280		}
281
282		let entropy_bytes = (word_count / 3) * 4;
283		let mut entropy = [0u8; (MAX_NB_WORDS / 3) * 4];
284		RngCore::fill_bytes(rng, &mut entropy[0..entropy_bytes]);
285		Mnemonic::from_entropy_in(language, &entropy[0..entropy_bytes])
286	}
287
288	/// Generate a new [Mnemonic] in the given language.
289	/// For the different supported word counts, see documentation on [Mnemonic].
290	///
291	/// Example:
292	///
293	/// ```
294	/// use bip39::{Mnemonic, Language};
295	///
296	/// let m = Mnemonic::generate_in(Language::English, 24).unwrap();
297	/// ```
298	#[cfg(feature = "rand")]
299	pub fn generate_in(language: Language, word_count: usize) -> Result<Mnemonic, Error> {
300		Mnemonic::generate_in_with(&mut rand::thread_rng(), language, word_count)
301	}
302
303	/// Generate a new [Mnemonic] in English.
304	/// For the different supported word counts, see documentation on [Mnemonic].
305	///
306	/// Example:
307	///
308	/// ```
309	/// use bip39::Mnemonic;
310	///
311	/// let m = Mnemonic::generate(24).unwrap();
312	/// ```
313	#[cfg(feature = "rand")]
314	pub fn generate(word_count: usize) -> Result<Mnemonic, Error> {
315		Mnemonic::generate_in(Language::English, word_count)
316	}
317
318	/// Get the language of the [Mnemonic].
319	pub fn language(&self) -> Language {
320		self.lang
321	}
322
323	/// Returns an iterator over the words of the [Mnemonic].
324	///
325	/// # Examples
326	///
327	/// Basic usage:
328	///
329	/// ```
330	/// use bip39::Mnemonic;
331	///
332	/// let mnemonic = Mnemonic::from_entropy(&[0; 32]).unwrap();
333	/// for (i, word) in mnemonic.words().enumerate() {
334	///     println!("{}. {}", i, word);
335	/// }
336	/// ```
337	pub fn words(&self) -> impl Iterator<Item = &'static str> + Clone + '_ {
338		let list = self.lang.word_list();
339		self.word_indices().map(move |i| list[i])
340	}
341
342	/// Returns an iterator over the words of the [Mnemonic].
343	#[deprecated(since = "2.1.0", note = "Use Mnemonic::words instead")]
344	pub fn word_iter(&self) -> impl Iterator<Item = &'static str> + Clone + '_ {
345		self.words()
346	}
347
348	/// Returns an iterator over [Mnemonic] word indices.
349	///
350	/// # Examples
351	///
352	/// Basic usage:
353	///
354	/// ```
355	/// use bip39::{Language, Mnemonic};
356	///
357	/// let list = Language::English.word_list();
358	/// let mnemonic = Mnemonic::from_entropy(&[0; 32]).unwrap();
359	/// for i in mnemonic.word_indices() {
360	/// 	println!("{} ({})", list[i], i);
361	/// }
362	/// ```
363	pub fn word_indices(&self) -> impl Iterator<Item = usize> + Clone + '_ {
364		self.words.iter().take_while(|&&w| w != EOF).map(|w| *w as usize)
365	}
366
367	/// Determine the language of the mnemonic from an iterator over its words.
368	/// See documentation on [Mnemonic::language_of] for more info.
369	fn language_of_iter<'a, W: Iterator<Item = &'a str>>(words: W) -> Result<Language, Error> {
370		let mut words = words.peekable();
371		let langs = Language::ALL;
372		{
373			// Start scope to drop first_word so that words can be reborrowed later.
374			let first_word = words.peek().ok_or(Error::BadWordCount(0))?;
375			if first_word.len() == 0 {
376				return Err(Error::BadWordCount(0));
377			}
378
379			// We first try to find the first word in wordlists that
380			// have guaranteed unique words.
381			for language in langs.iter().filter(|l| l.unique_words()) {
382				if language.find_word(first_word).is_some() {
383					return Ok(*language);
384				}
385			}
386		}
387
388		// If that didn't work, we start with all possible languages
389		// (those without unique words), and eliminate until there is
390		// just one left.
391		let mut possible = [false; language::MAX_NB_LANGUAGES];
392		for (i, lang) in langs.iter().enumerate() {
393			// To start, only consider lists that don't have unique words.
394			// Those were considered above.
395			possible[i] = !lang.unique_words();
396		}
397		for (idx, word) in words.enumerate() {
398			// Scrap languages that don't have this word.
399			for (i, lang) in langs.iter().enumerate() {
400				possible[i] &= lang.find_word(word).is_some();
401			}
402
403			// Get an iterator over remaining possible languages.
404			let mut iter = possible.iter().zip(langs.iter()).filter(|(p, _)| **p).map(|(_, l)| l);
405
406			match iter.next() {
407				// If all languages were eliminated, it's an invalid word.
408				None => return Err(Error::UnknownWord(idx)),
409				// If not, see if there is a second one remaining.
410				Some(remaining) => {
411					if iter.next().is_none() {
412						// No second remaining, we found our language.
413						return Ok(*remaining);
414					}
415				}
416			}
417		}
418
419		return Err(Error::AmbiguousLanguages(AmbiguousLanguages(possible)));
420	}
421
422	/// Determine the language of the mnemonic.
423	///
424	/// NOTE: This method only guarantees that the returned language is the
425	/// correct language on the assumption that the mnemonic is valid.
426	/// It does not itself validate the mnemonic.
427	///
428	/// Some word lists don't guarantee that their words don't occur in other
429	/// word lists. In the extremely unlikely case that a mnemonic can be
430	/// interpreted in multiple languages, an [Error::AmbiguousLanguages] is
431	/// returned, containing the possible languages.
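	///
	/// A small sketch using one of the BIP39 English test vectors:
	///
	/// ```
	/// use bip39::{Language, Mnemonic};
	///
	/// let phrase = "ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic";
	/// assert_eq!(Mnemonic::language_of(phrase).unwrap(), Language::English);
	/// ```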
432	pub fn language_of<S: AsRef<str>>(mnemonic: S) -> Result<Language, Error> {
433		Mnemonic::language_of_iter(mnemonic.as_ref().split_whitespace())
434	}
435
436	/// Parse a mnemonic in normalized UTF8 in the given language.
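	///
	/// A short sketch using one of the BIP39 English test vectors:
	///
	/// ```
	/// use bip39::{Language, Mnemonic};
	///
	/// let phrase = "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong";
	/// let m = Mnemonic::parse_in_normalized(Language::English, phrase).unwrap();
	/// assert_eq!(m.word_count(), 12);
	/// ```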
437	pub fn parse_in_normalized(language: Language, s: &str) -> Result<Mnemonic, Error> {
438		let nb_words = s.split_whitespace().count();
439		if is_invalid_word_count(nb_words) {
440			return Err(Error::BadWordCount(nb_words));
441		}
442
443		// Here we will store the resulting words.
444		let mut words = [EOF; MAX_NB_WORDS];
445
446		// And here we keep track of the bits to calculate and validate the checksum.
447		// We only use `nb_words * 11` elements in this array.
448		let mut bits = [false; MAX_NB_WORDS * 11];
449
450		for (i, word) in s.split_whitespace().enumerate() {
451			let idx = language.find_word(word).ok_or(Error::UnknownWord(i))?;
452
453			words[i] = idx;
454
455			for j in 0..11 {
456				bits[i * 11 + j] = idx >> (10 - j) & 1 == 1;
457			}
458		}
459
460		// Verify the checksum.
461		// We only use `nb_words / 3 * 4` elements in this array.
462		let mut entropy = [0u8; MAX_NB_WORDS / 3 * 4];
463		let nb_bytes_entropy = nb_words / 3 * 4;
464		for i in 0..nb_bytes_entropy {
465			for j in 0..8 {
466				if bits[i * 8 + j] {
467					entropy[i] += 1 << (7 - j);
468				}
469			}
470		}
471		let check = sha256::Hash::hash(&entropy[0..nb_bytes_entropy]);
472		for i in 0..nb_bytes_entropy / 4 {
473			if bits[8 * nb_bytes_entropy + i] != ((check[i / 8] & (1 << (7 - (i % 8)))) > 0) {
474				return Err(Error::InvalidChecksum);
475			}
476		}
477
478		Ok(Mnemonic {
479			lang: language,
480			words: words,
481		})
482	}
483
484	/// Parse a mnemonic in normalized UTF8 in the given language without checksum check.
485	///
486	/// It is advised to use this method together with the utility methods
487	/// - [Mnemonic::normalize_utf8_cow]
488	/// - [Mnemonic::language_of]
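	///
	/// A small sketch: the phrase below fails normal parsing only because of its checksum,
	/// yet this method accepts it (demo words only, do not use as a wallet):
	///
	/// ```
	/// use bip39::{Language, Mnemonic};
	///
	/// let phrase = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon";
	/// assert!(Mnemonic::parse_in_normalized(Language::English, phrase).is_err());
	/// let m = Mnemonic::parse_in_normalized_without_checksum_check(Language::English, phrase).unwrap();
	/// assert_eq!(m.word_count(), 12);
	/// ```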
489	pub fn parse_in_normalized_without_checksum_check(
490		language: Language,
491		s: &str,
492	) -> Result<Mnemonic, Error> {
493		let nb_words = s.split_whitespace().count();
494		if is_invalid_word_count(nb_words) {
495			return Err(Error::BadWordCount(nb_words));
496		}
497
498		// Here we will store the resulting words.
499		let mut words = [EOF; MAX_NB_WORDS];
500
501		for (i, word) in s.split_whitespace().enumerate() {
502			let idx = language.find_word(word).ok_or(Error::UnknownWord(i))?;
503
504			words[i] = idx;
505		}
506
507		Ok(Mnemonic {
508			lang: language,
509			words: words,
510		})
511	}
512
513	/// Parse a mnemonic in normalized UTF8.
514	pub fn parse_normalized(s: &str) -> Result<Mnemonic, Error> {
515		let lang = Mnemonic::language_of(s)?;
516		Mnemonic::parse_in_normalized(lang, s)
517	}
518
519	/// Parse a mnemonic in the given language.
520	#[cfg(feature = "unicode-normalization")]
521	pub fn parse_in<'a, S: Into<Cow<'a, str>>>(
522		language: Language,
523		s: S,
524	) -> Result<Mnemonic, Error> {
525		let mut cow = s.into();
526		Mnemonic::normalize_utf8_cow(&mut cow);
527		Ok(Mnemonic::parse_in_normalized(language, cow.as_ref())?)
528	}
529
530	/// Parse a mnemonic and detect the language from the enabled languages.
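	///
	/// A short sketch (the language is auto-detected from the enabled wordlists; the
	/// phrase is taken from the BIP39 English test vectors):
	///
	/// ```
	/// use bip39::Mnemonic;
	///
	/// let m = Mnemonic::parse("cat swing flag economy stadium alone churn speed unique patch report train").unwrap();
	/// assert_eq!(m.word_count(), 12);
	/// ```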
531	#[cfg(feature = "unicode-normalization")]
532	pub fn parse<'a, S: Into<Cow<'a, str>>>(s: S) -> Result<Mnemonic, Error> {
533		let mut cow = s.into();
534		Mnemonic::normalize_utf8_cow(&mut cow);
535
536		let language = if Language::ALL.len() == 1 {
537			Language::ALL[0]
538		} else {
539			Mnemonic::language_of(cow.as_ref())?
540		};
541
542		Ok(Mnemonic::parse_in_normalized(language, cow.as_ref())?)
543	}
544
545	/// Get the number of words in the mnemonic.
546	pub fn word_count(&self) -> usize {
547		self.word_indices().count()
548	}
549
550	/// Convert to seed bytes with a passphrase in normalized UTF8.
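	///
	/// A minimal sketch reproducing the first BIP39 English test vector (all-zero entropy,
	/// passphrase "TREZOR"):
	///
	/// ```
	/// use bip39::Mnemonic;
	///
	/// let m = Mnemonic::from_entropy(&[0u8; 16]).unwrap();
	/// let seed = m.to_seed_normalized("TREZOR");
	/// assert_eq!(seed.len(), 64);
	/// assert_eq!(seed[0], 0xc5); // first byte of the published vector seed c55257c3...
	/// ```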
551	pub fn to_seed_normalized(&self, normalized_passphrase: &str) -> [u8; 64] {
552		const PBKDF2_ROUNDS: usize = 2048;
553		const PBKDF2_BYTES: usize = 64;
554
555		let mut seed = [0u8; PBKDF2_BYTES];
556		pbkdf2::pbkdf2(self.words(), normalized_passphrase.as_bytes(), PBKDF2_ROUNDS, &mut seed);
557		seed
558	}
559
560	/// Convert to seed bytes.
561	#[cfg(feature = "unicode-normalization")]
562	pub fn to_seed<'a, P: Into<Cow<'a, str>>>(&self, passphrase: P) -> [u8; 64] {
563		let normalized_passphrase = {
564			let mut cow = passphrase.into();
565			Mnemonic::normalize_utf8_cow(&mut cow);
566			cow
567		};
568		self.to_seed_normalized(normalized_passphrase.as_ref())
569	}
570
571	/// Convert the mnemonic back to the entropy used to generate it.
572	/// The return value is a byte array together with the number of valid bytes in it.
573	/// Use [Mnemonic::to_entropy] (needs the `alloc` feature) to get a [`Vec<u8>`].
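	///
	/// A small sketch of the round trip (the returned length is `word_count() / 3 * 4` bytes):
	///
	/// ```
	/// use bip39::Mnemonic;
	///
	/// let m = Mnemonic::from_entropy(&[0xFF; 16]).unwrap();
	/// let (arr, len) = m.to_entropy_array();
	/// assert_eq!(len, 16);
	/// assert_eq!(&arr[..len], &[0xFF; 16][..]);
	/// ```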
574	pub fn to_entropy_array(&self) -> ([u8; 33], usize) {
575		// We unwrap errors here because this method can only be called on
576		// values that were already previously validated.
577
578		let language = Mnemonic::language_of_iter(self.words()).unwrap();
579
580		// Preallocate enough space for the longest possible mnemonic (24 words = 264 bits = 33 bytes).
581		let mut entropy = [0; 33];
582		let mut cursor = 0;
583		let mut offset = 0;
584		let mut remainder = 0;
585
586		let nb_words = self.word_count();
587		for word in self.words() {
588			let idx = language.find_word(word).expect("invalid mnemonic");
589
590			remainder |= ((idx as u32) << (32 - 11)) >> offset;
591			offset += 11;
592
593			while offset >= 8 {
594				entropy[cursor] = (remainder >> 24) as u8;
595				cursor += 1;
596				remainder <<= 8;
597				offset -= 8;
598			}
599		}
600
601		if offset != 0 {
602			entropy[cursor] = (remainder >> 24) as u8;
603		}
604
605		let entropy_bytes = (nb_words / 3) * 4;
606		(entropy, entropy_bytes)
607	}
608
609	/// Convert the mnemonic back to the entropy used to generate it.
610	#[cfg(feature = "alloc")]
611	pub fn to_entropy(&self) -> Vec<u8> {
612		let (arr, len) = self.to_entropy_array();
613		arr[0..len].to_vec()
614	}
615
616	/// Return checksum value for the Mnemonic.
617	///
618	/// The checksum value is the numerical value of the first `self.word_count() / 3` bits of the
619	/// [SHA256](https://en.wikipedia.org/wiki/SHA-2) digest of the Mnemonic's entropy, and is
620	/// encoded by the last word of the mnemonic sentence.
621	///
622	/// This is useful for validating the integrity of a mnemonic: For a valid mnemonic `m`, the
623	/// following assertion should hold:
624	///
625	/// ```rust
626	/// # use bip39::Mnemonic;
627	/// # use bitcoin_hashes::{Hash, sha256, hex::FromHex};
628	/// # let ent = Vec::from_hex("98FE3D0FF6E955A484B0A1D0C9CE10F6").unwrap();
629	/// # let m = Mnemonic::from_entropy(&ent).unwrap();
630	/// let checksum_width = m.word_count() / 3;
631	/// let shift_width = 8 - checksum_width;
632	/// assert_eq!(sha256::Hash::hash(&m.to_entropy())[0] >> shift_width, m.checksum());
633	/// ```
634	///
635	/// Note that since this library constrains initialization of `Mnemonic` instances through an
636	/// API that guarantees validity, all `Mnemonic` instances should be valid and the above
637	/// condition should hold.
638	pub fn checksum(&self) -> u8 {
639		let word_count = self.word_count();
640		let last_word = self.words[word_count - 1];
641		let mask = 0xFF >> (8 - word_count / 3);
642		last_word as u8 & mask
643	}
644}
645
646impl fmt::Display for Mnemonic {
647	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
648		for (i, word) in self.words().enumerate() {
649			if i > 0 {
650				f.write_str(" ")?;
651			}
652			f.write_str(word)?;
653		}
654		Ok(())
655	}
656}
657
658impl str::FromStr for Mnemonic {
659	type Err = Error;
660
661	fn from_str(s: &str) -> Result<Mnemonic, Error> {
662		#[cfg(feature = "unicode-normalization")]
663		{
664			Mnemonic::parse(s)
665		}
666		#[cfg(not(feature = "unicode-normalization"))]
667		{
668			Mnemonic::parse_normalized(s)
669		}
670	}
671}
672
673fn is_invalid_word_count(word_count: usize) -> bool {
674	word_count < MIN_NB_WORDS || word_count % 3 != 0 || word_count > MAX_NB_WORDS
675}
676
677#[cfg(test)]
678mod tests {
679	use super::*;
680
681	use bitcoin_hashes::hex::FromHex;
682
683	#[cfg(feature = "rand")]
684	#[test]
685	fn test_language_of() {
686		for lang in Language::ALL {
687			let m = Mnemonic::generate_in(*lang, 24).unwrap();
688			assert_eq!(*lang, Mnemonic::language_of_iter(m.words()).unwrap());
689			assert_eq!(
690				*lang,
691				Mnemonic::language_of_iter(m.to_string().split_whitespace()).unwrap()
692			);
693			assert_eq!(*lang, Mnemonic::language_of(m.to_string()).unwrap());
694			assert_eq!(*lang, Mnemonic::language_of(&m.to_string()).unwrap());
695		}
696	}
697
698	#[cfg(feature = "std")]
699	#[test]
700	fn test_ambiguous_languages() {
701		let mut present = [false; language::MAX_NB_LANGUAGES];
702		let mut present_vec = Vec::new();
703		let mut alternate = true;
704		for i in 0..Language::ALL.len() {
705			present[i] = alternate;
706			if alternate {
707				present_vec.push(Language::ALL[i]);
708			}
709			alternate = !alternate;
710		}
711		let amb = AmbiguousLanguages(present);
712		assert_eq!(amb.to_vec(), present_vec);
713		assert_eq!(amb.iter().collect::<Vec<_>>(), present_vec);
714	}
715
716	#[cfg(feature = "rand")]
717	#[test]
718	fn test_generate() {
719		let _ = Mnemonic::generate(24).unwrap();
720		let _ = Mnemonic::generate_in(Language::English, 24).unwrap();
721		let _ = Mnemonic::generate_in_with(&mut rand::thread_rng(), Language::English, 24).unwrap();
722	}
723
724	#[cfg(feature = "rand")]
725	#[test]
726	fn test_generate_word_counts() {
727		for word_count in [12, 15, 18, 21, 24].iter() {
728			let _ = Mnemonic::generate(*word_count).unwrap();
729		}
730	}
731
732	#[test]
733	fn test_vectors_english() {
734		// These vectors are tuples of
735		// (entropy, mnemonic, seed)
736		let test_vectors = [
737			(
738				"00000000000000000000000000000000",
739				"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about",
740				"c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e53495531f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04",
741			),
742			(
743				"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
744				"legal winner thank year wave sausage worth useful legal winner thank yellow",
745				"2e8905819b8723fe2c1d161860e5ee1830318dbf49a83bd451cfb8440c28bd6fa457fe1296106559a3c80937a1c1069be3a3a5bd381ee6260e8d9739fce1f607",
746			),
747			(
748				"80808080808080808080808080808080",
749				"letter advice cage absurd amount doctor acoustic avoid letter advice cage above",
750				"d71de856f81a8acc65e6fc851a38d4d7ec216fd0796d0a6827a3ad6ed5511a30fa280f12eb2e47ed2ac03b5c462a0358d18d69fe4f985ec81778c1b370b652a8",
751			),
752			(
753				"ffffffffffffffffffffffffffffffff",
754				"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong",
755				"ac27495480225222079d7be181583751e86f571027b0497b5b5d11218e0a8a13332572917f0f8e5a589620c6f15b11c61dee327651a14c34e18231052e48c069",
756			),
757			(
758				"000000000000000000000000000000000000000000000000",
759				"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon agent",
760				"035895f2f481b1b0f01fcf8c289c794660b289981a78f8106447707fdd9666ca06da5a9a565181599b79f53b844d8a71dd9f439c52a3d7b3e8a79c906ac845fa",
761			),
762			(
763				"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
764				"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will",
765				"f2b94508732bcbacbcc020faefecfc89feafa6649a5491b8c952cede496c214a0c7b3c392d168748f2d4a612bada0753b52a1c7ac53c1e93abd5c6320b9e95dd",
766			),
767			(
768				"808080808080808080808080808080808080808080808080",
769				"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always",
770				"107d7c02a5aa6f38c58083ff74f04c607c2d2c0ecc55501dadd72d025b751bc27fe913ffb796f841c49b1d33b610cf0e91d3aa239027f5e99fe4ce9e5088cd65",
771			),
772			(
773				"ffffffffffffffffffffffffffffffffffffffffffffffff",
774				"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when",
775				"0cd6e5d827bb62eb8fc1e262254223817fd068a74b5b449cc2f667c3f1f985a76379b43348d952e2265b4cd129090758b3e3c2c49103b5051aac2eaeb890a528",
776			),
777			(
778				"0000000000000000000000000000000000000000000000000000000000000000",
779				"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art",
780				"bda85446c68413707090a52022edd26a1c9462295029f2e60cd7c4f2bbd3097170af7a4d73245cafa9c3cca8d561a7c3de6f5d4a10be8ed2a5e608d68f92fcc8",
781			),
782			(
783				"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
784				"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title",
785				"bc09fca1804f7e69da93c2f2028eb238c227f2e9dda30cd63699232578480a4021b146ad717fbb7e451ce9eb835f43620bf5c514db0f8add49f5d121449d3e87",
786			),
787			(
788				"8080808080808080808080808080808080808080808080808080808080808080",
789				"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless",
790				"c0c519bd0e91a2ed54357d9d1ebef6f5af218a153624cf4f2da911a0ed8f7a09e2ef61af0aca007096df430022f7a2b6fb91661a9589097069720d015e4e982f",
791			),
792			(
793				"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
794				"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote",
795				"dd48c104698c30cfe2b6142103248622fb7bb0ff692eebb00089b32d22484e1613912f0a5b694407be899ffd31ed3992c456cdf60f5d4564b8ba3f05a69890ad",
796			),
797			(
798				"9e885d952ad362caeb4efe34a8e91bd2",
799				"ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic",
800				"274ddc525802f7c828d8ef7ddbcdc5304e87ac3535913611fbbfa986d0c9e5476c91689f9c8a54fd55bd38606aa6a8595ad213d4c9c9f9aca3fb217069a41028",
801			),
802			(
803				"6610b25967cdcca9d59875f5cb50b0ea75433311869e930b",
804				"gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog",
805				"628c3827a8823298ee685db84f55caa34b5cc195a778e52d45f59bcf75aba68e4d7590e101dc414bc1bbd5737666fbbef35d1f1903953b66624f910feef245ac",
806			),
807			(
808				"68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c",
809				"hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length",
810				"64c87cde7e12ecf6704ab95bb1408bef047c22db4cc7491c4271d170a1b213d20b385bc1588d9c7b38f1b39d415665b8a9030c9ec653d75e65f847d8fc1fc440",
811			),
812			(
813				"c0ba5a8e914111210f2bd131f3d5e08d",
814				"scheme spot photo card baby mountain device kick cradle pact join borrow",
815				"ea725895aaae8d4c1cf682c1bfd2d358d52ed9f0f0591131b559e2724bb234fca05aa9c02c57407e04ee9dc3b454aa63fbff483a8b11de949624b9f1831a9612",
816			),
817			(
818				"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3",
819				"horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave",
820				"fd579828af3da1d32544ce4db5c73d53fc8acc4ddb1e3b251a31179cdb71e853c56d2fcb11aed39898ce6c34b10b5382772db8796e52837b54468aeb312cfc3d",
821			),
822			(
823				"9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863",
824				"panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside",
825				"72be8e052fc4919d2adf28d5306b5474b0069df35b02303de8c1729c9538dbb6fc2d731d5f832193cd9fb6aeecbc469594a70e3dd50811b5067f3b88b28c3e8d",
826			),
827			(
828				"23db8160a31d3e0dca3688ed941adbf3",
829				"cat swing flag economy stadium alone churn speed unique patch report train",
830				"deb5f45449e615feff5640f2e49f933ff51895de3b4381832b3139941c57b59205a42480c52175b6efcffaa58a2503887c1e8b363a707256bdd2b587b46541f5",
831			),
832			(
833				"8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0",
834				"light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access",
835				"4cbdff1ca2db800fd61cae72a57475fdc6bab03e441fd63f96dabd1f183ef5b782925f00105f318309a7e9c3ea6967c7801e46c8a58082674c860a37b93eda02",
836			),
837			(
838				"066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad",
839				"all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform",
840				"26e975ec644423f4a4c4f4215ef09b4bd7ef924e85d1d17c4cf3f136c2863cf6df0a475045652c57eb5fb41513ca2a2d67722b77e954b4b3fc11f7590449191d",
841			),
842			(
843				"f30f8c1da665478f49b001d94c5fc452",
844				"vessel ladder alter error federal sibling chat ability sun glass valve picture",
845				"2aaa9242daafcee6aa9d7269f17d4efe271e1b9a529178d7dc139cd18747090bf9d60295d0ce74309a78852a9caadf0af48aae1c6253839624076224374bc63f",
846			),
847			(
848				"c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05",
849				"scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump",
850				"7b4a10be9d98e6cba265566db7f136718e1398c71cb581e1b2f464cac1ceedf4f3e274dc270003c670ad8d02c4558b2f8e39edea2775c9e232c7cb798b069e88",
851			),
852			(
853				"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f",
854				"void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold",
855				"01f5bced59dec48e362f2c45b5de68b9fd6c92c6634f44d6d40aab69056506f0e35524a518034ddc1192e1dacd32c1ed3eaa3c3b131c88ed8e7e54c49a5d0998",
856			)
857		];
858
859		for vector in &test_vectors {
860			let entropy = Vec::<u8>::from_hex(&vector.0).unwrap();
861			let mnemonic_str = vector.1;
862			let seed = Vec::<u8>::from_hex(&vector.2).unwrap();
863
864			let mnemonic = Mnemonic::from_entropy(&entropy).unwrap();
865
866			assert_eq!(
867				mnemonic,
868				Mnemonic::parse_in_normalized(Language::English, mnemonic_str).unwrap(),
869				"failed vector: {}",
870				mnemonic_str
871			);
872			assert_eq!(
873				mnemonic,
874				Mnemonic::parse_normalized(mnemonic_str).unwrap(),
875				"failed vector: {}",
876				mnemonic_str
877			);
878			assert_eq!(
879				&seed[..],
880				&mnemonic.to_seed_normalized("TREZOR")[..],
881				"failed vector: {}",
882				mnemonic_str
883			);
884
885			#[cfg(feature = "unicode-normalization")]
886			{
887				assert_eq!(&mnemonic.to_string(), mnemonic_str, "failed vector: {}", mnemonic_str);
888				assert_eq!(
889					mnemonic,
890					Mnemonic::parse_in(Language::English, mnemonic_str).unwrap(),
891					"failed vector: {}",
892					mnemonic_str
893				);
894				assert_eq!(
895					mnemonic,
896					Mnemonic::parse(mnemonic_str).unwrap(),
897					"failed vector: {}",
898					mnemonic_str
899				);
900				assert_eq!(
901					&seed[..],
902					&mnemonic.to_seed("TREZOR")[..],
903					"failed vector: {}",
904					mnemonic_str
905				);
906				assert_eq!(&entropy, &mnemonic.to_entropy(), "failed vector: {}", mnemonic_str);
907				assert_eq!(
908					&entropy[..],
909					&mnemonic.to_entropy_array().0[0..entropy.len()],
910					"failed vector: {}",
911					mnemonic_str
912				);
913			}
914		}
915	}
916
917	#[test]
918	fn checksum() {
919		let vectors = [
920			"00000000000000000000000000000000",
921			"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
922			"80808080808080808080808080808080",
923			"ffffffffffffffffffffffffffffffff",
924			"000000000000000000000000000000000000000000000000",
925			"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
926			"808080808080808080808080808080808080808080808080",
927			"ffffffffffffffffffffffffffffffffffffffffffffffff",
928			"0000000000000000000000000000000000000000000000000000000000000000",
929			"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
930			"8080808080808080808080808080808080808080808080808080808080808080",
931			"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
932			"9e885d952ad362caeb4efe34a8e91bd2",
933			"6610b25967cdcca9d59875f5cb50b0ea75433311869e930b",
934			"68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c",
935			"c0ba5a8e914111210f2bd131f3d5e08d",
936			"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3",
937			"9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863",
938			"23db8160a31d3e0dca3688ed941adbf3",
939			"8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0",
940			"066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad",
941			"f30f8c1da665478f49b001d94c5fc452",
942			"c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05",
943			"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f",
944			"ed3b83f0d7913a19667a1cfd7298cd57",
945			"70639a4e81b151277b345476d169a3743ff3c141",
946			"ba2520298b92063a7a0ee1d453ba92513af81d4f86e1d336",
947			"9447d2cf44349cd88a58f5b4ff6f83b9a2d54c42f033e12b8e4d00cc",
948			"38711e550dc6557df8082b2a87f7860ebbe47ea5867a7068f5f0f5b85db68be8",
949		];
950
951		for entropy_hex in &vectors {
952			let ent = Vec::from_hex(entropy_hex).unwrap();
953			let m = Mnemonic::from_entropy(&ent).unwrap();
954			let word_count = m.word_count();
955			let cs = m.checksum();
956			let digest = sha256::Hash::hash(&ent);
957			dbg!(digest);
958			assert_eq!(digest[0] >> (8 - word_count / 3), cs);
959		}
960	}
961
962	#[test]
963	fn test_invalid_english() {
964		// correct phrase:
965		// "letter advice cage absurd amount doctor acoustic avoid letter advice cage above"
966
967		assert_eq!(
968			Mnemonic::parse_normalized(
969				"getter advice cage absurd amount doctor acoustic avoid letter advice cage above",
970			),
971			Err(Error::UnknownWord(0))
972		);
973
974		assert_eq!(
975			Mnemonic::parse_normalized(
976				"letter advice cagex absurd amount doctor acoustic avoid letter advice cage above",
977			),
978			Err(Error::UnknownWord(2))
979		);
980
981		assert_eq!(
982			Mnemonic::parse_normalized(
983				"advice cage absurd amount doctor acoustic avoid letter advice cage above",
984			),
985			Err(Error::BadWordCount(11))
986		);
987
988		assert_eq!(
989			Mnemonic::parse_normalized(
990				"primary advice cage absurd amount doctor acoustic avoid letter advice cage above",
991			),
992			Err(Error::InvalidChecksum)
993		);
994	}
995
996	#[test]
997	fn test_invalid_entropy() {
998		// Between 128 and 256 bits, but not divisible by 32.
999		assert_eq!(Mnemonic::from_entropy(&vec![b'x'; 17]), Err(Error::BadEntropyBitCount(136)));
1000
1001		// Less than 128 bits.
1002		assert_eq!(Mnemonic::from_entropy(&vec![b'x'; 4]), Err(Error::BadEntropyBitCount(32)));
1003
1004		// Greater than 256 bits.
1005		assert_eq!(Mnemonic::from_entropy(&vec![b'x'; 36]), Err(Error::BadEntropyBitCount(288)));
1006	}
1007
1008	#[cfg(all(feature = "japanese", feature = "std"))]
1009	#[test]
1010	fn test_vectors_japanese() {
1011		//! Test some Japanese language test vectors.
1012		//! For these test vectors, we seem to generate different mnemonic phrases than the test
1013		//! vectors expect us to. However, our generated seeds are correct, and tiny-bip39,
1014		//! an alternative implementation of BIP39, also does not fulfill the test vectors.
1015
1016		// These vectors are tuples of
1017		// (entropy, mnemonic, passphrase, seed)
1018		let vectors = [
1019			(
1020				"00000000000000000000000000000000",
1021				"あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あおぞら",
1022				"㍍ガバヴァぱばぐゞちぢ十人十色",
1023				"a262d6fb6122ecf45be09c50492b31f92e9beb7d9a845987a02cefda57a15f9c467a17872029a9e92299b5cbdf306e3a0ee620245cbd508959b6cb7ca637bd55",
1024			),
1025			(
1026				"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
1027				"そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかめ",
1028				"㍍ガバヴァぱばぐゞちぢ十人十色",
1029				"aee025cbe6ca256862f889e48110a6a382365142f7d16f2b9545285b3af64e542143a577e9c144e101a6bdca18f8d97ec3366ebf5b088b1c1af9bc31346e60d9",
1030			),
1031			(
1032				"80808080808080808080808080808080",
1033				"そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あかちゃん",
1034				"㍍ガバヴァぱばぐゞちぢ十人十色",
1035				"e51736736ebdf77eda23fa17e31475fa1d9509c78f1deb6b4aacfbd760a7e2ad769c714352c95143b5c1241985bcb407df36d64e75dd5a2b78ca5d2ba82a3544",
1036			),
1037			(
1038				"ffffffffffffffffffffffffffffffff",
1039				"われる われる われる われる われる われる われる われる われる われる われる ろんぶん",
1040				"㍍ガバヴァぱばぐゞちぢ十人十色",
1041				"4cd2ef49b479af5e1efbbd1e0bdc117f6a29b1010211df4f78e2ed40082865793e57949236c43b9fe591ec70e5bb4298b8b71dc4b267bb96ed4ed282c8f7761c",
1042			),
1043			(
1044				"000000000000000000000000000000000000000000000000",
1045				"あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あらいぐま",
1046				"㍍ガバヴァぱばぐゞちぢ十人十色",
1047				"d99e8f1ce2d4288d30b9c815ae981edd923c01aa4ffdc5dee1ab5fe0d4a3e13966023324d119105aff266dac32e5cd11431eeca23bbd7202ff423f30d6776d69",
1048			),
1049			(
1050				"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
1051				"そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れいぎ",
1052				"㍍ガバヴァぱばぐゞちぢ十人十色",
1053				"eaaf171efa5de4838c758a93d6c86d2677d4ccda4a064a7136344e975f91fe61340ec8a615464b461d67baaf12b62ab5e742f944c7bd4ab6c341fbafba435716",
1054			),
1055			(
1056				"808080808080808080808080808080808080808080808080",
1057				"そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら いきなり",
1058				"㍍ガバヴァぱばぐゞちぢ十人十色",
1059				"aec0f8d3167a10683374c222e6e632f2940c0826587ea0a73ac5d0493b6a632590179a6538287641a9fc9df8e6f24e01bf1be548e1f74fd7407ccd72ecebe425",
1060			),
1061			(
1062				"ffffffffffffffffffffffffffffffffffffffffffffffff",
1063				"われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる りんご",
1064				"㍍ガバヴァぱばぐゞちぢ十人十色",
1065				"f0f738128a65b8d1854d68de50ed97ac1831fc3a978c569e415bbcb431a6a671d4377e3b56abd518daa861676c4da75a19ccb41e00c37d086941e471a4374b95",
1066			),
1067			(
1068				"0000000000000000000000000000000000000000000000000000000000000000",
1069				"あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん いってい",
1070				"㍍ガバヴァぱばぐゞちぢ十人十色",
1071				"23f500eec4a563bf90cfda87b3e590b211b959985c555d17e88f46f7183590cd5793458b094a4dccc8f05807ec7bd2d19ce269e20568936a751f6f1ec7c14ddd",
1072			),
1073			(
1074				"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
1075				"そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん まんきつ",
1076				"㍍ガバヴァぱばぐゞちぢ十人十色",
1077				"cd354a40aa2e241e8f306b3b752781b70dfd1c69190e510bc1297a9c5738e833bcdc179e81707d57263fb7564466f73d30bf979725ff783fb3eb4baa86560b05",
1078			),
1079			(
1080				"8080808080808080808080808080808080808080808080808080808080808080",
1081				"そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる うめる",
1082				"㍍ガバヴァぱばぐゞちぢ十人十色",
1083				"6b7cd1b2cdfeeef8615077cadd6a0625f417f287652991c80206dbd82db17bf317d5c50a80bd9edd836b39daa1b6973359944c46d3fcc0129198dc7dc5cd0e68",
1084			),
1085			(
1086				"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
1087				"われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる らいう",
1088				"㍍ガバヴァぱばぐゞちぢ十人十色",
1089				"a44ba7054ac2f9226929d56505a51e13acdaa8a9097923ca07ea465c4c7e294c038f3f4e7e4b373726ba0057191aced6e48ac8d183f3a11569c426f0de414623",
1090			),
1091			(
1092				"77c2b00716cec7213839159e404db50d",
1093				"せまい うちがわ あずき かろう めずらしい だんち ますく おさめる ていぼう あたる すあな えしゃく",
1094				"㍍ガバヴァぱばぐゞちぢ十人十色",
1095				"344cef9efc37d0cb36d89def03d09144dd51167923487eec42c487f7428908546fa31a3c26b7391a2b3afe7db81b9f8c5007336b58e269ea0bd10749a87e0193",
1096			),
1097			(
1098				"b63a9c59a6e641f288ebc103017f1da9f8290b3da6bdef7b",
1099				"ぬすむ ふっかつ うどん こうりつ しつじ りょうり おたがい せもたれ あつめる いちりゅう はんしゃ ごますり そんけい たいちょう らしんばん ぶんせき やすみ ほいく",
1100				"㍍ガバヴァぱばぐゞちぢ十人十色",
1101				"b14e7d35904cb8569af0d6a016cee7066335a21c1c67891b01b83033cadb3e8a034a726e3909139ecd8b2eb9e9b05245684558f329b38480e262c1d6bc20ecc4",
1102			),
1103			(
1104				"3e141609b97933b66a060dcddc71fad1d91677db872031e85f4c015c5e7e8982",
1105				"くのう てぬぐい そんかい すろっと ちきゅう ほあん とさか はくしゅ ひびく みえる そざい てんすう たんぴん くしょう すいようび みけん きさらぎ げざん ふくざつ あつかう はやい くろう おやゆび こすう",
1106				"㍍ガバヴァぱばぐゞちぢ十人十色",
1107				"32e78dce2aff5db25aa7a4a32b493b5d10b4089923f3320c8b287a77e512455443298351beb3f7eb2390c4662a2e566eec5217e1a37467af43b46668d515e41b",
1108			),
1109			(
1110				"0460ef47585604c5660618db2e6a7e7f",
1111				"あみもの いきおい ふいうち にげる ざんしょ じかん ついか はたん ほあん すんぽう てちがい わかめ",
1112				"㍍ガバヴァぱばぐゞちぢ十人十色",
1113				"0acf902cd391e30f3f5cb0605d72a4c849342f62bd6a360298c7013d714d7e58ddf9c7fdf141d0949f17a2c9c37ced1d8cb2edabab97c4199b142c829850154b",
1114			),
1115			(
1116				"72f60ebac5dd8add8d2a25a797102c3ce21bc029c200076f",
1117				"すろっと にくしみ なやむ たとえる へいこう すくう きない けってい とくべつ ねっしん いたみ せんせい おくりがな まかい とくい けあな いきおい そそぐ",
1118				"㍍ガバヴァぱばぐゞちぢ十人十色",
1119				"9869e220bec09b6f0c0011f46e1f9032b269f096344028f5006a6e69ea5b0b8afabbb6944a23e11ebd021f182dd056d96e4e3657df241ca40babda532d364f73",
1120			),
1121			(
1122				"2c85efc7f24ee4573d2b81a6ec66cee209b2dcbd09d8eddc51e0215b0b68e416",
1123				"かほご きうい ゆたか みすえる もらう がっこう よそう ずっと ときどき したうけ にんか はっこう つみき すうじつ よけい くげん もくてき まわり せめる げざい にげる にんたい たんそく ほそく",
1124				"㍍ガバヴァぱばぐゞちぢ十人十色",
1125				"713b7e70c9fbc18c831bfd1f03302422822c3727a93a5efb9659bec6ad8d6f2c1b5c8ed8b0b77775feaf606e9d1cc0a84ac416a85514ad59f5541ff5e0382481",
1126			),
1127			(
1128				"eaebabb2383351fd31d703840b32e9e2",
1129				"めいえん さのう めだつ すてる きぬごし ろんぱ はんこ まける たいおう さかいし ねんいり はぶらし",
1130				"㍍ガバヴァぱばぐゞちぢ十人十色",
1131				"06e1d5289a97bcc95cb4a6360719131a786aba057d8efd603a547bd254261c2a97fcd3e8a4e766d5416437e956b388336d36c7ad2dba4ee6796f0249b10ee961",
1132			),
1133			(
1134				"7ac45cfe7722ee6c7ba84fbc2d5bd61b45cb2fe5eb65aa78",
1135				"せんぱい おしえる ぐんかん もらう きあい きぼう やおや いせえび のいず じゅしん よゆう きみつ さといも ちんもく ちわわ しんせいじ とめる はちみつ",
1136				"㍍ガバヴァぱばぐゞちぢ十人十色",
1137				"1fef28785d08cbf41d7a20a3a6891043395779ed74503a5652760ee8c24dfe60972105ee71d5168071a35ab7b5bd2f8831f75488078a90f0926c8e9171b2bc4a",
1138			),
1139			(
1140				"4fa1a8bc3e6d80ee1316050e862c1812031493212b7ec3f3bb1b08f168cabeef",
1141				"こころ いどう きあつ そうがんきょう へいあん せつりつ ごうせい はいち いびき きこく あんい おちつく きこえる けんとう たいこ すすめる はっけん ていど はんおん いんさつ うなぎ しねま れいぼう みつかる",
1142				"㍍ガバヴァぱばぐゞちぢ十人十色",
1143				"43de99b502e152d4c198542624511db3007c8f8f126a30818e856b2d8a20400d29e7a7e3fdd21f909e23be5e3c8d9aee3a739b0b65041ff0b8637276703f65c2",
1144			),
1145			(
1146				"18ab19a9f54a9274f03e5209a2ac8a91",
1147				"うりきれ さいせい じゆう むろん とどける ぐうたら はいれつ ひけつ いずれ うちあわせ おさめる おたく",
1148				"㍍ガバヴァぱばぐゞちぢ十人十色",
1149				"3d711f075ee44d8b535bb4561ad76d7d5350ea0b1f5d2eac054e869ff7963cdce9581097a477d697a2a9433a0c6884bea10a2193647677977c9820dd0921cbde",
1150			),
1151			(
1152				"18a2e1d81b8ecfb2a333adcb0c17a5b9eb76cc5d05db91a4",
1153				"うりきれ うねる せっさたくま きもち めんきょ へいたく たまご ぜっく びじゅつかん さんそ むせる せいじ ねくたい しはらい せおう ねんど たんまつ がいけん",
1154				"㍍ガバヴァぱばぐゞちぢ十人十色",
1155				"753ec9e333e616e9471482b4b70a18d413241f1e335c65cd7996f32b66cf95546612c51dcf12ead6f805f9ee3d965846b894ae99b24204954be80810d292fcdd",
1156			),
1157			(
1158				"15da872c95a13dd738fbf50e427583ad61f18fd99f628c417a61cf8343c90419",
1159				"うちゅう ふそく ひしょ がちょう うけもつ めいそう みかん そざい いばる うけとる さんま さこつ おうさま ぱんつ しひょう めした たはつ いちぶ つうじょう てさぎょう きつね みすえる いりぐち かめれおん",
1160				"㍍ガバヴァぱばぐゞちぢ十人十色",
1161				"346b7321d8c04f6f37b49fdf062a2fddc8e1bf8f1d33171b65074531ec546d1d3469974beccb1a09263440fc92e1042580a557fdce314e27ee4eabb25fa5e5fe",
1162			)
1163		];
1164
1165		for vector in &vectors {
1166			let entropy = Vec::<u8>::from_hex(&vector.0).unwrap();
1167			let mnemonic_str = vector.1;
1168			let passphrase = vector.2;
1169			let seed = Vec::<u8>::from_hex(&vector.3).unwrap();
1170
1171			let mnemonic = Mnemonic::from_entropy_in(Language::Japanese, &entropy).unwrap();
1172
1173			assert_eq!(seed, &mnemonic.to_seed(passphrase)[..], "failed vector: {}", mnemonic_str);
1174			let rt = Mnemonic::parse_in(Language::Japanese, mnemonic.to_string())
1175				.expect(&format!("vector: {}", mnemonic_str));
1176			assert_eq!(seed, &rt.to_seed(passphrase)[..]);
1177
1178			let mnemonic = Mnemonic::parse_in(Language::Japanese, mnemonic_str)
1179				.expect(&format!("vector: {}", mnemonic_str));
1180			assert_eq!(seed, &mnemonic.to_seed(passphrase)[..], "failed vector: {}", mnemonic_str);
1181		}
1182	}
1183}