Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support for no_std #44

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 17 additions & 9 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "tiny-bip39"
version = "1.0.0"
version = "1.0.1"
authors = [
"Stephen Oliver <[email protected]>",
"Maciej Hirsz <[email protected]>",
Expand All @@ -27,21 +27,29 @@ italian = []
japanese = []
korean = []
spanish = []
default-langs = ["chinese-simplified", "chinese-traditional", "french", "italian", "japanese", "korean", "spanish"]
default-langs = [
"chinese-simplified",
"chinese-traditional",
"french",
"italian",
"japanese",
"korean",
"spanish",
]
default = [ "default-langs", "rand", "std" ]
std = ["sha2/std", "anyhow/std", "rustc-hash/std", "unicode-normalization/std" ]

default = ["default-langs", "rand"]

[dependencies]
anyhow = "1.0.57"
thiserror = "1.0.31"
rustc-hash = "1.1.0"
sha2 = "0.10.2"
anyhow = {version = "1.0.57", default-features = false }
rustc-hash = { version = "1.1.0", default-features = false }
sha2 = { version = "0.10.2", default-features = false }
hmac = "0.12.1"
pbkdf2 = { version = "0.11.0", default-features = false }
rand = { version = "0.8.5", optional = true }
once_cell = "1.12.0"
unicode-normalization = "0.1.19"
unicode-normalization = { version = "0.1.19", default-features = false }
zeroize = { version = "1.5.5", features = ["zeroize_derive"] }
lazy_static = { version = "1.4.0", default-features = false, features = [ "spin_no_std" ] }

[dev-dependencies]
hex = "0.4.3"
Expand Down
1 change: 1 addition & 0 deletions src/crypto.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ use hmac::Hmac;
#[cfg(feature = "rand")]
use rand::{thread_rng, RngCore};
use sha2::Digest;
use crate::Vec;

const PBKDF2_ROUNDS: u32 = 2048;
const PBKDF2_BYTES: usize = 64;
Expand Down
25 changes: 18 additions & 7 deletions src/error.rs
Original file line number Diff line number Diff line change
@@ -1,20 +1,31 @@
use crate::mnemonic_type::MnemonicType;
use thiserror::Error;
use core::fmt;

#[derive(Debug, Error)]
#[derive(Debug)]
pub enum ErrorKind {
#[error("invalid checksum")]
InvalidChecksum,
#[error("invalid word in phrase")]
InvalidWord,
#[error("invalid keysize: {0}")]
InvalidKeysize(usize),
#[error("invalid number of words in phrase: {0}")]
InvalidWordLength(usize),
#[error("invalid entropy length {0}bits for mnemonic type {1:?}")]
InvalidEntropyLength(usize, MnemonicType),
}

impl fmt::Display for ErrorKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::InvalidChecksum => write!(f, "invalid checksum"),
Self::InvalidWord => write!(f, "invalid word in phrase"),
Self::InvalidKeysize(u) => write!(f, "invalid keysize: {0}", u),
Self::InvalidWordLength(u) => write!(f, "invalid number of words in phrase: {0}", u),
Self::InvalidEntropyLength(u, m) => write!(
f,
"invalid entropy length {0}bits for mnemonic type {1:?}",
u, m
),
}
}
}

#[cfg(test)]
mod test {
use super::*;
Expand Down
106 changes: 56 additions & 50 deletions src/language.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
use crate::error::ErrorKind;
use crate::util::{Bits, Bits11};
use crate::{
error::ErrorKind,
util::{Bits, Bits11},
Vec,
};
#[cfg(feature = "std")]
use rustc_hash::FxHashMap;
#[cfg(not(feature = "std"))]
type FxHashMap<K, V> = alloc::collections::BTreeMap<K, V>;

pub struct WordMap {
inner: FxHashMap<&'static str, Bits11>,
Expand All @@ -25,10 +31,9 @@ impl WordList {
}

pub fn get_words_by_prefix(&self, prefix: &str) -> &[&'static str] {
let start = self.inner
.binary_search(&prefix)
.unwrap_or_else(|idx| idx);
let count = self.inner[start..].iter()
let start = self.inner.binary_search(&prefix).unwrap_or_else(|idx| idx);
let count = self.inner[start..]
.iter()
.take_while(|word| word.starts_with(prefix))
.count();

Expand All @@ -38,7 +43,7 @@ impl WordList {

mod lazy {
use super::{Bits11, WordList, WordMap};
use once_cell::sync::Lazy;
use crate::Vec;

/// lazy generation of the word list
fn gen_wordlist(lang_words: &'static str) -> WordList {
Expand All @@ -60,48 +65,49 @@ mod lazy {

WordMap { inner }
}

pub static WORDLIST_ENGLISH: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/english.txt")));
#[cfg(feature = "chinese-simplified")]
pub static WORDLIST_CHINESE_SIMPLIFIED: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/chinese_simplified.txt")));
#[cfg(feature = "chinese-traditional")]
pub static WORDLIST_CHINESE_TRADITIONAL: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/chinese_traditional.txt")));
#[cfg(feature = "french")]
pub static WORDLIST_FRENCH: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/french.txt")));
#[cfg(feature = "italian")]
pub static WORDLIST_ITALIAN: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/italian.txt")));
#[cfg(feature = "japanese")]
pub static WORDLIST_JAPANESE: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/japanese.txt")));
#[cfg(feature = "korean")]
pub static WORDLIST_KOREAN: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/korean.txt")));
#[cfg(feature = "spanish")]
pub static WORDLIST_SPANISH: Lazy<WordList> =
Lazy::new(|| gen_wordlist(include_str!("langs/spanish.txt")));

pub static WORDMAP_ENGLISH: Lazy<WordMap> = Lazy::new(|| gen_wordmap(&WORDLIST_ENGLISH));
#[cfg(feature = "chinese-simplified")]
pub static WORDMAP_CHINESE_SIMPLIFIED: Lazy<WordMap> =
Lazy::new(|| gen_wordmap(&WORDLIST_CHINESE_SIMPLIFIED));
#[cfg(feature = "chinese-traditional")]
pub static WORDMAP_CHINESE_TRADITIONAL: Lazy<WordMap> =
Lazy::new(|| gen_wordmap(&WORDLIST_CHINESE_TRADITIONAL));
#[cfg(feature = "french")]
pub static WORDMAP_FRENCH: Lazy<WordMap> = Lazy::new(|| gen_wordmap(&WORDLIST_FRENCH));
#[cfg(feature = "italian")]
pub static WORDMAP_ITALIAN: Lazy<WordMap> = Lazy::new(|| gen_wordmap(&WORDLIST_ITALIAN));
#[cfg(feature = "japanese")]
pub static WORDMAP_JAPANESE: Lazy<WordMap> = Lazy::new(|| gen_wordmap(&WORDLIST_JAPANESE));
#[cfg(feature = "korean")]
pub static WORDMAP_KOREAN: Lazy<WordMap> = Lazy::new(|| gen_wordmap(&WORDLIST_KOREAN));
#[cfg(feature = "spanish")]
pub static WORDMAP_SPANISH: Lazy<WordMap> = Lazy::new(|| gen_wordmap(&WORDLIST_SPANISH));
// Lazily-built BIP-39 word lists and word lookup maps, one pair per language.
// `lazy_static` is used here (configured with `spin_no_std` in Cargo.toml)
// so these statics can also initialize in `no_std` builds.
lazy_static::lazy_static! {
// Ordered word lists (index -> word). English is always compiled in;
// every other language is gated behind a compile-time feature.
pub static ref WORDLIST_ENGLISH: WordList =
gen_wordlist(include_str!("langs/english.txt"));
#[cfg(feature = "chinese-simplified")]
pub static ref WORDLIST_CHINESE_SIMPLIFIED: WordList =
gen_wordlist(include_str!("langs/chinese_simplified.txt"));
#[cfg(feature = "chinese-traditional")]
pub static ref WORDLIST_CHINESE_TRADITIONAL: WordList =
gen_wordlist(include_str!("langs/chinese_traditional.txt"));
#[cfg(feature = "french")]
pub static ref WORDLIST_FRENCH: WordList =
gen_wordlist(include_str!("langs/french.txt"));
#[cfg(feature = "italian")]
pub static ref WORDLIST_ITALIAN: WordList =
gen_wordlist(include_str!("langs/italian.txt"));
#[cfg(feature = "japanese")]
pub static ref WORDLIST_JAPANESE: WordList =
gen_wordlist(include_str!("langs/japanese.txt"));
#[cfg(feature = "korean")]
pub static ref WORDLIST_KOREAN: WordList =
gen_wordlist(include_str!("langs/korean.txt"));
#[cfg(feature = "spanish")]
pub static ref WORDLIST_SPANISH: WordList =
gen_wordlist(include_str!("langs/spanish.txt"));

// Reverse lookup maps (word -> Bits11 position), each derived from the
// corresponding word list above via `gen_wordmap`.
pub static ref WORDMAP_ENGLISH: WordMap = gen_wordmap(&WORDLIST_ENGLISH);
#[cfg(feature = "chinese-simplified")]
pub static ref WORDMAP_CHINESE_SIMPLIFIED: WordMap =
gen_wordmap(&WORDLIST_CHINESE_SIMPLIFIED);
#[cfg(feature = "chinese-traditional")]
pub static ref WORDMAP_CHINESE_TRADITIONAL: WordMap =
gen_wordmap(&WORDLIST_CHINESE_TRADITIONAL);
#[cfg(feature = "french")]
pub static ref WORDMAP_FRENCH: WordMap = gen_wordmap(&WORDLIST_FRENCH);
#[cfg(feature = "italian")]
pub static ref WORDMAP_ITALIAN: WordMap = gen_wordmap(&WORDLIST_ITALIAN);
#[cfg(feature = "japanese")]
pub static ref WORDMAP_JAPANESE: WordMap = gen_wordmap(&WORDLIST_JAPANESE);
#[cfg(feature = "korean")]
pub static ref WORDMAP_KOREAN: WordMap = gen_wordmap(&WORDLIST_KOREAN);
#[cfg(feature = "spanish")]
pub static ref WORDMAP_SPANISH: WordMap = gen_wordmap(&WORDLIST_SPANISH);
}
}

/// The language determines which words will be used in a mnemonic phrase, but also indirectly
Expand Down Expand Up @@ -220,7 +226,7 @@ mod test {
fn words_by_prefix() {
let wl = &lazy::WORDLIST_ENGLISH;
let res = wl.get_words_by_prefix("woo");
assert_eq!(res, ["wood","wool"]);
assert_eq!(res, ["wood", "wool"]);
}

#[cfg_attr(all(target_arch = "wasm32"), wasm_bindgen_test)]
Expand Down
12 changes: 12 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,18 @@
//! println!("{:X}", seed);
//! ```
//!

// Build as `no_std` when the "std" feature is disabled; heap allocation then
// comes from the `alloc` crate instead of `std`.
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;

// Crate-internal aliases so the rest of the code can name `String`/`Vec`
// uniformly whether they come from `alloc` or `std`.
#[cfg(not(feature = "std"))]
pub(crate) use alloc::{string::String, vec::Vec};
// Spell out the full `std` paths (mirroring the `alloc` branch above):
// a bare `use {String, Vec};` has no resolvable single-segment import path.
#[cfg(feature = "std")]
pub(crate) use std::{string::String, vec::Vec};

mod error;
mod language;
mod mnemonic;
Expand Down
10 changes: 6 additions & 4 deletions src/mnemonic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,11 @@ use crate::error::ErrorKind;
use crate::language::Language;
use crate::mnemonic_type::MnemonicType;
use crate::util::{checksum, BitWriter, IterExt};
use crate::String;
use crate::Vec;
use anyhow::Error;
use std::fmt;
use std::mem;
use core::fmt;
use core::mem;
use unicode_normalization::UnicodeNormalization;
use zeroize::Zeroizing;

Expand Down Expand Up @@ -197,7 +199,7 @@ impl Mnemonic {
let mut bits = BitWriter::with_capacity(264);

for word in phrase.split(" ") {
bits.push(wordmap.get_bits(&word)?);
bits.push(wordmap.get_bits(&word).map_err(Error::msg)?);
}

let mtype = MnemonicType::for_word_count(bits.len() / 11)?;
Expand All @@ -219,7 +221,7 @@ impl Mnemonic {
let expected_checksum = checksum(checksum_byte, mtype.checksum_bits());

if actual_checksum != expected_checksum {
Err(ErrorKind::InvalidChecksum)?;
Err(Error::msg(ErrorKind::InvalidChecksum))?;
}

Ok(entropy)
Expand Down
6 changes: 3 additions & 3 deletions src/mnemonic_type.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::fmt;
use core::fmt;
use anyhow::Error;
use crate::error::ErrorKind;

Expand Down Expand Up @@ -58,7 +58,7 @@ impl MnemonicType {
18 => MnemonicType::Words18,
21 => MnemonicType::Words21,
24 => MnemonicType::Words24,
_ => Err(ErrorKind::InvalidWordLength(size))?,
_ => Err(Error::msg(ErrorKind::InvalidWordLength(size)))?,
};

Ok(mnemonic_type)
Expand All @@ -82,7 +82,7 @@ impl MnemonicType {
192 => MnemonicType::Words18,
224 => MnemonicType::Words21,
256 => MnemonicType::Words24,
_ => Err(ErrorKind::InvalidKeysize(size))?,
_ => Err(Error::msg(ErrorKind::InvalidKeysize(size)))?,
};

Ok(mnemonic_type)
Expand Down
18 changes: 14 additions & 4 deletions src/seed.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
use std::fmt;
use unicode_normalization::UnicodeNormalization;
use zeroize::{Zeroize, Zeroizing};
use core::fmt;
extern crate alloc;
#[cfg(not(feature = "std"))]
use crate::alloc::string::ToString;
use crate::crypto::pbkdf2;
use crate::mnemonic::Mnemonic;
use crate::Vec;
use alloc::format;
use unicode_normalization::UnicodeNormalization;
use zeroize::{Zeroize, Zeroizing};

/// The secret value used to derive HD wallet addresses from a [`Mnemonic`][Mnemonic] phrase.
///
Expand Down Expand Up @@ -108,7 +113,12 @@ mod test {
assert_eq!(format!("{:#X}", seed), "0x0BDE96F14C35A66235478E0C16C152FCAF6301E4D9A81D3FEBC50879FE7E5438E6A8DD3E39BDF3AB7B12D6B44218710E17D7A2844EE9633FAB0E03D9A6C8569B");
}

fn test_unicode_normalization(lang: Language, phrase: &str, password: &str, expected_seed_hex: &str) {
fn test_unicode_normalization(
lang: Language,
phrase: &str,
password: &str,
expected_seed_hex: &str,
) {
let mnemonic = Mnemonic::from_phrase(phrase, lang).unwrap();
let seed = Seed::new(&mnemonic, password);
assert_eq!(format!("{:x}", seed), expected_seed_hex);
Expand Down
6 changes: 4 additions & 2 deletions src/util.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use crate::String;
use crate::Vec;
use unicode_normalization::Decompositions;

pub(crate) trait IterExt: Iterator {
Expand Down Expand Up @@ -148,7 +150,7 @@ impl BitWriter {
}

pub(crate) struct BitIter<In: Bits, Out: Bits, I: Iterator<Item = In> + Sized> {
_phantom: ::std::marker::PhantomData<Out>,
_phantom: core::marker::PhantomData<Out>,
source: I,
read: usize,
buffer: u64,
Expand All @@ -164,7 +166,7 @@ where
let source = source.into_iter();

BitIter {
_phantom: ::std::marker::PhantomData,
_phantom: core::marker::PhantomData,
source,
read: 0,
buffer: 0,
Expand Down
Loading