Use compact proofs for parachains (#2194)
* Use compact proofs for parachains

* Remove StorageProofChecker

* Cleanup
serban300 committed Jun 7, 2023
1 parent 3e8e435 commit 2632d78
Showing 17 changed files with 243 additions and 434 deletions.
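Across the files below, the commit repeatedly swaps the old proof-building pattern (record every trie node with `record_all_trie_keys`, wrap the nodes in a `StorageProof`, then validate them with `UntrustedVecDb::try_new` or `StorageProofChecker`) for a single compact-proof constructor, `UntrustedVecDb::try_from_db`. The following is a minimal sketch of that before/after shape; it assumes only the calls visible in the hunks below, and the helper name and the `mdb`, `root`, `keys` bindings are illustrative.

use bp_runtime::UntrustedVecDb;
use sp_core::{Blake2Hasher, H256};
use sp_trie::MemoryDB;

// Illustrative helper mirroring the change applied in the benchmark/test code below.
// `mdb`, `root` and `keys` stand in for the trie database, state root and storage keys
// that the surrounding code already has in scope.
fn build_compact_proof(
    mdb: &MemoryDB<Blake2Hasher>,
    root: H256,
    keys: Vec<Vec<u8>>,
) -> UntrustedVecDb {
    // Before: record *all* trie nodes with `record_all_trie_keys`, then wrap them via
    // `UntrustedVecDb::try_new(StorageProof::new(nodes), root, keys)`.
    // After: build the compact proof straight from the database; only the nodes needed
    // to prove `keys` against `root` are kept.
    UntrustedVecDb::try_from_db::<Blake2Hasher, _>(mdb, root, keys)
        .expect("valid trie database, root and keys; qed")
}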
17 changes: 6 additions & 11 deletions bin/runtime-common/src/messages_benchmarking.rs
@@ -28,8 +28,8 @@ use bp_messages::{
};
use bp_polkadot_core::parachains::ParaHash;
use bp_runtime::{
- grow_trie_leaf_value, record_all_trie_keys, AccountIdOf, Chain, HashOf, HasherOf, Parachain,
- StorageProofSize, UntrustedVecDb,
+ grow_trie_leaf_value, AccountIdOf, Chain, HashOf, HasherOf, Parachain, StorageProofSize,
+ UntrustedVecDb,
};
use codec::Encode;
use frame_support::{weights::Weight, StateVersion};
@@ -40,9 +40,7 @@ use pallet_bridge_messages::{
};
use sp_runtime::traits::{Header, Zero};
use sp_std::prelude::*;
- use sp_trie::{
- LayoutV0, LayoutV1, MemoryDB, StorageProof, TrieConfiguration, TrieDBMutBuilder, TrieMut,
- };
+ use sp_trie::{LayoutV0, LayoutV1, MemoryDB, TrieConfiguration, TrieDBMutBuilder, TrieMut};
use xcm::v3::prelude::*;

/// Prepare inbound bridge message according to given message proof parameters.
@@ -285,15 +283,12 @@ where
}

// generate storage proof to be delivered to This chain
- let read_proof = record_all_trie_keys::<L, _>(&mdb, &root)
- .map_err(|_| "record_all_trie_keys has failed")
- .expect("record_all_trie_keys should not fail in benchmarks");
- let storage_proof = UntrustedVecDb::try_new::<HasherOf<BridgedChainOf<R, MI>>>(
- StorageProof::new(read_proof),
+ let storage_proof = UntrustedVecDb::try_from_db::<HasherOf<BridgedChainOf<R, MI>>, _>(
+ &mdb,
root,
vec![storage_key],
)
- .unwrap();
+ .expect("UntrustedVecDb::try_from_db() should not fail in benchmarks");

(root, storage_proof)
}
48 changes: 37 additions & 11 deletions bin/runtime-common/src/parachains_benchmarking.rs
@@ -22,12 +22,13 @@ use crate::messages_benchmarking::insert_header_to_grandpa_pallet

use bp_parachains::parachain_head_storage_key_at_source;
use bp_polkadot_core::parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId};
- use bp_runtime::{grow_trie_leaf_value, record_all_trie_keys, StorageProofSize};
+ use bp_runtime::{grow_trie_leaf_value, Chain, StorageProofSize, UntrustedVecDb};
use codec::Encode;
- use frame_support::traits::Get;
+ use frame_support::{traits::Get, StateVersion};
+ use pallet_bridge_grandpa::BridgedChain;
use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber};
use sp_std::prelude::*;
- use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};
+ use sp_trie::{LayoutV0, LayoutV1, MemoryDB, TrieConfiguration, TrieDBMutBuilder, TrieMut};

/// Prepare proof of messages for the `receive_messages_proof` call.
///
@@ -43,7 +44,34 @@ where
+ pallet_bridge_grandpa::Config<R::BridgesGrandpaPalletInstance>,
PI: 'static,
<R as pallet_bridge_grandpa::Config<R::BridgesGrandpaPalletInstance>>::BridgedChain:
- bp_runtime::Chain<BlockNumber = RelayBlockNumber, Hash = RelayBlockHash>,
+ Chain<BlockNumber = RelayBlockNumber, Hash = RelayBlockHash>,
{
+ match <BridgedChain<R, R::BridgesGrandpaPalletInstance> as Chain>::STATE_VERSION {
+ StateVersion::V0 => do_prepare_parachain_heads_proof::<R, PI, LayoutV0<RelayBlockHasher>>(
+ parachains,
+ parachain_head_size,
+ size,
+ ),
+ StateVersion::V1 => do_prepare_parachain_heads_proof::<R, PI, LayoutV1<RelayBlockHasher>>(
+ parachains,
+ parachain_head_size,
+ size,
+ ),
+ }
+ }
+
+ pub fn do_prepare_parachain_heads_proof<R, PI, L>(
+ parachains: &[ParaId],
+ parachain_head_size: u32,
+ size: StorageProofSize,
+ ) -> (RelayBlockNumber, RelayBlockHash, ParaHeadsProof, Vec<(ParaId, ParaHash)>)
+ where
+ R: pallet_bridge_parachains::Config<PI>
+ + pallet_bridge_grandpa::Config<R::BridgesGrandpaPalletInstance>,
+ PI: 'static,
+ <R as pallet_bridge_grandpa::Config<R::BridgesGrandpaPalletInstance>>::BridgedChain:
+ Chain<BlockNumber = RelayBlockNumber, Hash = RelayBlockHash>,
+ L: TrieConfiguration<Hash = RelayBlockHasher>,
+ {
let parachain_head = ParaHead(vec![0u8; parachain_head_size as usize]);

@@ -53,8 +81,7 @@ where
let mut state_root = Default::default();
let mut mdb = MemoryDB::default();
{
- let mut trie =
- TrieDBMutBuilderV1::<RelayBlockHasher>::new(&mut mdb, &mut state_root).build();
+ let mut trie = TrieDBMutBuilder::<L>::new(&mut mdb, &mut state_root).build();

// insert parachain heads
for (i, parachain) in parachains.into_iter().enumerate() {
@@ -68,18 +95,17 @@ where
trie.insert(&storage_key.0, &leaf_data)
.map_err(|_| "TrieMut::insert has failed")
.expect("TrieMut::insert should not fail in benchmarks");
- storage_keys.push(storage_key);
+ storage_keys.push(storage_key.0);
parachain_heads.push((*parachain, parachain_head.hash()))
}
}

// generate heads storage proof
- let proof = record_all_trie_keys::<LayoutV1<RelayBlockHasher>, _>(&mdb, &state_root)
- .map_err(|_| "record_all_trie_keys has failed")
- .expect("record_all_trie_keys should not fail in benchmarks");
+ let storage_proof = UntrustedVecDb::try_from_db::<L::Hash, _>(&mdb, state_root, storage_keys)
+ .expect("UntrustedVecDb::try_from_db() should not fail in benchmarks");

let (relay_block_number, relay_block_hash) =
insert_header_to_grandpa_pallet::<R, R::BridgesGrandpaPalletInstance>(state_root);

- (relay_block_number, relay_block_hash, ParaHeadsProof(proof), parachain_heads)
+ (relay_block_number, relay_block_hash, ParaHeadsProof { storage_proof }, parachain_heads)
}
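The new `prepare_parachain_heads_proof` above only dispatches on the bridged chain's `STATE_VERSION`, so the proof is built with the same trie layout the bridged chain uses for its state, while `do_prepare_parachain_heads_proof` stays generic over the layout. The same dispatch pattern in isolation, as a sketch; the function names here are illustrative and not from the crate.

use sp_core::Blake2Hasher;
use sp_runtime::StateVersion;
use sp_trie::{LayoutV0, LayoutV1, TrieConfiguration};

// Layout-generic worker: everything trie-related is parameterised over `L`,
// like `do_prepare_parachain_heads_proof` above.
fn layout_name<L: TrieConfiguration>() -> &'static str {
    core::any::type_name::<L>()
}

// Thin dispatcher: its only job is to pick the layout matching the declared state version.
fn layout_name_for(state_version: StateVersion) -> &'static str {
    match state_version {
        StateVersion::V0 => layout_name::<LayoutV0<Blake2Hasher>>(),
        StateVersion::V1 => layout_name::<LayoutV1<Blake2Hasher>>(),
    }
}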
4 changes: 2 additions & 2 deletions bin/runtime-common/src/refund_relayer_extension.rs
@@ -792,7 +792,7 @@ mod tests {
ParaId(TestParachain::get()),
[parachain_head_at_relay_header_number as u8; 32].into(),
)],
- parachain_heads_proof: ParaHeadsProof(vec![]),
+ parachain_heads_proof: ParaHeadsProof { storage_proof: Default::default() },
})
}

@@ -1366,7 +1366,7 @@ mod tests {
(ParaId(TestParachain::get()), [1u8; 32].into()),
(ParaId(TestParachain::get() + 1), [1u8; 32].into()),
],
- parachain_heads_proof: ParaHeadsProof(vec![]),
+ parachain_heads_proof: ParaHeadsProof { storage_proof: Default::default() },
}),
message_delivery_call(200),
],
41 changes: 17 additions & 24 deletions fuzz/storage-proof/src/main.rs
@@ -21,26 +21,14 @@
use honggfuzz::fuzz;
// Logic for checking Substrate storage proofs.

- use sp_core::{Blake2Hasher, H256};
- use sp_state_machine::{backend::Backend, prove_read, InMemoryBackend};
+ use bp_runtime::UntrustedVecDb;
+ use sp_core::{storage::StateVersion, Blake2Hasher};
use sp_std::vec::Vec;
use std::collections::HashMap;

- fn craft_known_storage_proof(
- input_vec: Vec<(Vec<u8>, Vec<u8>)>,
- ) -> (H256, bp_runtime::RawStorageProof) {
- let storage_proof_vec =
- vec![(None, input_vec.iter().map(|x| (x.0.clone(), Some(x.1.clone()))).collect())];
- log::info!("Storage proof vec {:?}", storage_proof_vec);
- let state_version = sp_runtime::StateVersion::default();
- let backend = <InMemoryBackend<Blake2Hasher>>::from((storage_proof_vec, state_version));
- let root = backend.storage_root(std::iter::empty(), state_version).0;
- let vector_element_proof =
- prove_read(backend, input_vec.iter().map(|x| x.0.as_slice())).unwrap();
- (root, vector_element_proof.iter_nodes().cloned().collect())
- }
-
- fn transform_into_unique(input_vec: Vec<(Vec<u8>, Vec<u8>)>) -> Vec<(Vec<u8>, Vec<u8>)> {
+ fn transform_into_unique(
+ input_vec: Vec<(Vec<u8>, Option<Vec<u8>>)>,
+ ) -> Vec<(Vec<u8>, Option<Vec<u8>>)> {
let mut output_hashmap = HashMap::new();
let mut output_vec = Vec::new();
for key_value_pair in input_vec {
@@ -53,18 +41,23 @@ fn transform_into_unique(input_vec: Vec<(Vec<u8>, Vec<u8>)>) -> Vec<(Vec<u8>, Ve
}

fn run_fuzzer() {
- fuzz!(|input_vec: Vec<(Vec<u8>, Vec<u8>)>| {
+ fuzz!(|input_vec: Vec<(Vec<u8>, Option<Vec<u8>>)>| {
if input_vec.is_empty() {
return
}
let unique_input_vec = transform_into_unique(input_vec);
- let (root, craft_known_storage_proof) = craft_known_storage_proof(unique_input_vec.clone());
- let mut checker =
- <bp_runtime::StorageProofChecker<Blake2Hasher>>::new(root, craft_known_storage_proof)
- .expect("Valid proof passed; qed");
- for key_value_pair in unique_input_vec {
+ let (root, storage_proof) = UntrustedVecDb::try_from_entries::<Blake2Hasher>(
+ StateVersion::default(),
+ &unique_input_vec,
+ )
+ .expect("UntrustedVecDb::try_from_entries() shouldn't fail");
+ let mut storage = storage_proof
+ .verify::<Blake2Hasher>(StateVersion::V1, &root)
+ .expect("UntrustedVecDb::verify() shouldn't fail");
+
+ for key_value_pair in &unique_input_vec {
log::info!("Reading value for pair {:?}", key_value_pair);
- assert_eq!(checker.read_value(&key_value_pair.0), Ok(Some(key_value_pair.1.clone())));
+ assert_eq!(storage.get(&key_value_pair.0), Ok(&key_value_pair.1));
}
})
}
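The rewritten fuzzer above now exercises a full round trip: `UntrustedVecDb::try_from_entries` builds a state root plus a compact proof from key/value entries, and `verify` checks the proof against that root and yields readable storage. The same round trip on a fixed input, as a sketch; the signatures are assumed from the hunks above, and the function name and sample entries are illustrative.

use bp_runtime::UntrustedVecDb;
use sp_core::{storage::StateVersion, Blake2Hasher};

// Sketch of the round trip the fuzzer runs on arbitrary inputs, here with fixed entries.
fn compact_proof_round_trip() {
    let entries = vec![
        (b"key1".to_vec(), Some(b"value1".to_vec())),
        (b"key2".to_vec(), None),
    ];

    // Build a state root and a compact proof covering exactly these keys.
    let (root, proof) =
        UntrustedVecDb::try_from_entries::<Blake2Hasher>(StateVersion::default(), &entries)
            .expect("well-formed entries should always yield a proof");

    // Verifying against the matching root gives back a readable storage view.
    let mut storage = proof
        .verify::<Blake2Hasher>(StateVersion::V1, &root)
        .expect("the proof was built for this root");

    // Every proved key reads back with the value (or absence) it was built with.
    for (key, value) in &entries {
        assert_eq!(storage.get(key), Ok(value));
    }
}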
18 changes: 12 additions & 6 deletions modules/grandpa/src/lib.rs
@@ -672,7 +672,7 @@ mod tests {
TestHeader, TestNumber, TestRuntime, MAX_BRIDGED_AUTHORITIES,
};
use bp_header_chain::BridgeGrandpaCall;
- use bp_runtime::BasicOperatingMode;
+ use bp_runtime::{BasicOperatingMode, UntrustedVecDb};
use bp_test_utils::{
authority_list, generate_owned_bridge_module_tests, make_default_justification,
make_justification_for_header, JustificationGeneratorParams, ALICE, BOB,
@@ -1203,11 +1203,14 @@
}

#[test]
- fn parse_finalized_storage_proof_rejects_proof_on_unknown_header() {
+ fn verify_vec_db_storage_rejects_unknown_header() {
run_test(|| {
assert_noop!(
- Pallet::<TestRuntime>::storage_proof_checker(Default::default(), vec![],)
- .map(|_| ()),
+ Pallet::<TestRuntime>::verify_vec_db_storage(
+ Default::default(),
+ Default::default(),
+ )
+ .map(|_| ()),
bp_header_chain::HeaderChainError::UnknownHeader,
);
});
@@ -1216,7 +1219,10 @@
#[test]
fn parse_finalized_storage_accepts_valid_proof() {
run_test(|| {
- let (state_root, storage_proof) = bp_runtime::craft_valid_storage_proof();
+ let (state_root, storage_proof) = UntrustedVecDb::try_from_entries::<
+ sp_core::Blake2Hasher,
+ >(Default::default(), &[(b"key1".to_vec(), None)])
+ .expect("UntrustedVecDb::try_from_entries() shouldn't fail in tests");

let mut header = test_header(2);
header.set_state_root(state_root);
@@ -1226,7 +1232,7 @@
<ImportedHeaders<TestRuntime>>::insert(hash, header.build());

assert_ok!(
- Pallet::<TestRuntime>::storage_proof_checker(hash, storage_proof).map(|_| ())
+ Pallet::<TestRuntime>::verify_vec_db_storage(hash, storage_proof).map(|_| ())
);
});
}
33 changes: 10 additions & 23 deletions modules/messages/src/tests/messages_generation.rs
@@ -24,15 +24,13 @@ use bp_messages::{
MessagePayload, OutboundLaneData,
};
use bp_runtime::{
- grow_trie_leaf_value, record_all_trie_keys, AccountIdOf, Chain, HashOf, HasherOf,
- RangeInclusiveExt, StorageProofSize, UntrustedVecDb,
+ grow_trie_leaf_value, AccountIdOf, Chain, HashOf, HasherOf, RangeInclusiveExt,
+ StorageProofSize, UntrustedVecDb,
};
use codec::Encode;
use frame_support::StateVersion;
use sp_std::{ops::RangeInclusive, prelude::*};
- use sp_trie::{
- LayoutV0, LayoutV1, MemoryDB, StorageProof, TrieConfiguration, TrieDBMutBuilder, TrieMut,
- };
+ use sp_trie::{LayoutV0, LayoutV1, MemoryDB, TrieConfiguration, TrieDBMutBuilder, TrieMut};

/// Dummy message generation function.
pub fn generate_dummy_message(_: MessageNonce) -> MessagePayload {
@@ -202,15 +200,10 @@
}

// generate storage proof to be delivered to This chain
- let read_proof = record_all_trie_keys::<L, _>(&mdb, &root)
- .map_err(|_| "record_all_trie_keys has failed")
- .expect("record_all_trie_keys should not fail in benchmarks");
- let storage = UntrustedVecDb::try_new::<HasherOf<BridgedChain>>(
- StorageProof::new(read_proof),
- root,
- storage_keys,
- )
- .unwrap();
+ let storage =
+ UntrustedVecDb::try_from_db::<HasherOf<BridgedChain>, _>(&mdb, root, storage_keys)
+ .expect("UntrustedVecDb::try_from_db() should not fail in benchmarks");
+
(root, storage)
}

@@ -240,14 +233,8 @@
}

// generate storage proof to be delivered to This chain
- let read_proof = record_all_trie_keys::<L, _>(&mdb, &root)
- .map_err(|_| "record_all_trie_keys has failed")
- .expect("record_all_trie_keys should not fail in benchmarks");
- let storage = UntrustedVecDb::try_new::<HasherOf<BridgedChain>>(
- StorageProof::new(read_proof),
- root,
- vec![storage_key],
- )
- .unwrap();
+ let storage =
+ UntrustedVecDb::try_from_db::<HasherOf<BridgedChain>, _>(&mdb, root, vec![storage_key])
+ .expect("UntrustedVecDb::try_from_db() should not fail in benchmarks");
(root, storage)
}
2 changes: 1 addition & 1 deletion modules/parachains/src/call_ext.rs
@@ -169,7 +169,7 @@ mod tests {
RuntimeCall::Parachains(crate::Call::<TestRuntime, ()>::submit_parachain_heads {
at_relay_block: (num, Default::default()),
parachains,
- parachain_heads_proof: ParaHeadsProof(Vec::new()),
+ parachain_heads_proof: ParaHeadsProof { storage_proof: Default::default() },
})
.check_obsolete_submit_parachain_heads()
.is_ok()