Skip to content

Commit

Permalink
Merge pull request #77 from mangata-finance/feature/do_not_use_babe_randomness
Browse files Browse the repository at this point in the history

Feature/do not use babe randomness
  • Loading branch information
mateuszaaa authored Jun 29, 2021
2 parents 92de842 + e860e25 commit c93a035
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 22 deletions.
26 changes: 7 additions & 19 deletions client/consensus/babe/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -698,20 +698,11 @@ impl<B, C, E, I, Error, SO> SlotWorker<B> for BabeSlotWorker<B, C, E, I, SO> whe
let epoch_data = <Self as sc_consensus_slots::SimpleSlotWorker<B>>::epoch_data(self, &chain_head, slot_info.number).unwrap();

if let Some((_, public)) = <Self as sc_consensus_slots::SimpleSlotWorker<B>>::claim_slot(self, &chain_head, slot_info.number, &epoch_data){
let changes = self.epoch_changes.lock();
let signature = changes.viable_epoch(
&epoch_data,
|slot| Epoch::genesis(&self.config, slot)
)
.ok_or(sp_consensus::Error::StateUnavailable(String::from("cannot fetch epoch for seed generation purposes")))
.and_then(|epoch|
{
let transcript_data = create_shuffling_seed_input_data(&seed, &epoch.as_ref());
self.keystore.read()
.sr25519_vrf_sign(<AuthorityId as AppKey>::ID, &public.into(), transcript_data)
.map_err(|_| sp_consensus::Error::StateUnavailable(String::from("signing seed failure")))
}
);

let transcript_data = create_shuffling_seed_input_data(&seed);
let signature = self.keystore.read()
.sr25519_vrf_sign(<AuthorityId as AppKey>::ID, &public.into(), transcript_data)
.map_err(|_| sp_consensus::Error::StateUnavailable(String::from("signing seed failure")));

let inject_randome_seed_inherent_data = signature.and_then(|sig| {
RandomSeedInherentDataProvider(
Expand Down Expand Up @@ -833,12 +824,11 @@ impl<Block: BlockT> BabeLink<Block> {
}

/// calculates input that after signing will become next shuffling seed
fn create_shuffling_seed_input_data<'a>(prev_seed: &'a SeedType, epoch: &'a Epoch) -> vrf::VRFTranscriptData<'a>{
fn create_shuffling_seed_input_data<'a>(prev_seed: &'a SeedType) -> vrf::VRFTranscriptData<'a>{
vrf::VRFTranscriptData {
label: b"shuffling_seed",
items: vec![
("prev_seed", vrf::VRFTranscriptValue::Bytes(&prev_seed.seed)),
("epoch_randomness", vrf::VRFTranscriptValue::Bytes(&epoch.randomness)),
]
}
}
Expand Down Expand Up @@ -981,7 +971,6 @@ where
block_id: &BlockId<Block>,
inherents: Vec<Block::Extrinsic>,
public_key: &[u8],
epoch: &Epoch
) -> Result<(), Error<Block>> {
let runtime_api = self.client.runtime_api();

Expand All @@ -994,7 +983,7 @@ where
.map_err(|_| Error::SeedVerificationErrorStr(String::from("cannot deserialize seed")))?;
let proof = VRFProof::from_bytes(&new.proof)
.map_err(|_| Error::SeedVerificationErrorStr(String::from("cannot deserialize seed proof")))?;
let input = make_transcript(create_shuffling_seed_input_data(&prev, &epoch));
let input = make_transcript(create_shuffling_seed_input_data(&prev));

schnorrkel::PublicKey::from_bytes(public_key).and_then(|p| {
p.vrf_verify(input, &output, &proof)
Expand Down Expand Up @@ -1113,7 +1102,6 @@ where
&BlockId::Hash(parent_hash),
extrinsics,
key.0.as_ref(),
viable_epoch.as_ref()
)?;

trace!(target: "babe", "Checked {:?}; importing.", pre_header);
Expand Down
11 changes: 9 additions & 2 deletions node/src/chain_spec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,9 @@
use hex_literal::hex;
use mangata_runtime::{
AccountId, AssetsInfoConfig, BabeConfig, BalancesConfig, BridgeConfig, BridgedAssetConfig,
GenesisConfig, GrandpaConfig, SessionConfig, SessionKeys, Signature, StakerStatus,
StakingConfig, SudoConfig, SystemConfig, TokensConfig, VerifierConfig, XykConfig, WASM_BINARY,
GenesisConfig, GrandpaConfig, RandomConfig, SessionConfig, SessionKeys, Signature,
StakerStatus, StakingConfig, SudoConfig, SystemConfig, TokensConfig, VerifierConfig, XykConfig,
WASM_BINARY,
};
use sc_service::ChainType;
use sp_consensus_babe::AuthorityId as BabeId;
Expand Down Expand Up @@ -143,6 +144,7 @@ pub fn development_config() -> Result<ChainSpec, String> {
10_000__000_000_000_000_000_000u128,
)],
true,
[0_u8; 32],
)
},
// Bootnodes
Expand Down Expand Up @@ -254,6 +256,7 @@ pub fn local_testnet_config() -> Result<ChainSpec, String> {
10_000__000_000_000_000_000_000u128,
)],
true,
[0_u8; 32],
)
},
// Bootnodes
Expand Down Expand Up @@ -282,6 +285,7 @@ fn testnet_genesis(
endowed_accounts: Vec<AccountId>,
staking_accounts: Vec<(AccountId, u32, u128, u32, u128, u32, u128)>,
_enable_println: bool,
init_seed: [u8; 32],
) -> GenesisConfig {
GenesisConfig {
frame_system: Some(SystemConfig {
Expand Down Expand Up @@ -426,5 +430,8 @@ fn testnet_genesis(
})
.collect(),
}),
pallet_random_seed: Some(RandomConfig {
random_seed: init_seed,
}),
}
}
10 changes: 10 additions & 0 deletions pallets/random-seed/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,16 @@ decl_storage! {
/// Current time for the current block.
pub Seed get(fn seed) : SeedType;
}
add_extra_genesis {
#[allow(clippy::type_complexity)]
config(random_seed): [u8; 32];
build(|config: &GenesisConfig|{
Seed::set(SeedType{
seed: config.random_seed,
proof: [0_u8; 64]
});
});
}
}

impl<T: Trait> Module<T> {
Expand Down
2 changes: 1 addition & 1 deletion runtime/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -529,7 +529,7 @@ construct_runtime!(
System: frame_system::{Module, Call, Config, Storage, Event<T>},
RandomnessCollectiveFlip: pallet_randomness_collective_flip::{Module, Call, Storage},
Timestamp: pallet_timestamp::{Module, Call, Storage, Inherent},
Random: pallet_random_seed::{Module, Call, Storage, Inherent},
Random: pallet_random_seed::{Module, Call, Storage, Inherent, Config},
Session: pallet_session::{Module, Call, Storage, Event, Config<T>},
Authorship: pallet_authorship::{Module, Call, Storage, Inherent},
Babe: pallet_babe::{Module, Call, Storage, Config, Inherent, ValidateUnsigned},
Expand Down

0 comments on commit c93a035

Please sign in to comment.