Bridge: added force_set_pallet_state call to pallet-bridge-grandpa (paritytech#4465)

closes paritytech/parity-bridges-common#2963

See issue above for rationale
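
For illustration, here is a minimal sketch (not part of this change) of how a Root/owner
origin might construct the new call from a trusted snapshot of the bridged chain;
the helper name and the exact type paths are assumptions to check against the
concrete runtime:

```rust
// Hypothetical helper: build the recovery call from trusted values taken from
// the bridged chain (e.g. its state at a recent finalized block).
use sp_consensus_grandpa::{AuthorityList, SetId};

fn build_recovery_call<T: pallet_bridge_grandpa::Config<I>, I: 'static>(
    new_current_set_id: SetId,
    new_authorities: AuthorityList,
    new_best_header: pallet_bridge_grandpa::BridgedHeader<T, I>,
) -> pallet_bridge_grandpa::Call<T, I> {
    // The call itself performs (almost) no checks, so the correctness of these
    // values is entirely the responsibility of the caller (Root or the pallet owner).
    pallet_bridge_grandpa::Call::force_set_pallet_state {
        new_current_set_id,
        new_authorities,
        new_best_header: Box::new(new_best_header),
    }
}
```
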
I've been thinking about adding similar calls to other pallets, but:
- for the parachains pallet I haven't been able to think of a case where we
would need it, given how long a referendum takes. I.e. if the storage proof
format changes and we want to unstick the bridge, it'll take at least a few
weeks to sync a single parachain header, then another few weeks for the next
one, and so on.
- for the messages pallet I initially made a similar call, but it only
overwrites storage values (`OutboundLanes` and/or `InboundLanes`), so there's
no extra logic involved and the same thing can simply be done using
`system.set_storage`, as sketched below.
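
A rough sketch of that alternative (again, not part of this change; the storage
item path and its types are assumptions about pallet-bridge-messages):

```rust
// Hypothetical helper: overwrite an outbound lane's state with a raw
// `system.set_storage` call instead of a dedicated pallet call.
use codec::Encode;
use bp_messages::{LaneId, OutboundLaneData};

fn build_set_storage_call<T: pallet_bridge_messages::Config<I>, I: 'static>(
    lane: LaneId,
    new_state: OutboundLaneData,
) -> frame_system::Call<T> {
    // Raw storage key of `OutboundLanes(lane)` for this messages pallet instance.
    let key = pallet_bridge_messages::OutboundLanes::<T, I>::hashed_key_for(lane);
    // `set_storage` is a Root-only call that writes raw (key, SCALE-encoded value) pairs.
    frame_system::Call::<T>::set_storage { items: vec![(key, new_state.encode())] }
}
```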

---------

Co-authored-by: command-bot <>
svyatonik authored and hitchhooker committed Jun 5, 2024
1 parent 059d1e1 commit 38afe3b
Showing 6 changed files with 311 additions and 45 deletions.
14 changes: 14 additions & 0 deletions bridges/modules/grandpa/src/benchmarking.rs
@@ -138,5 +138,19 @@ benchmarks_instance_pallet! {
assert!(!<ImportedHeaders<T, I>>::contains_key(genesis_header.hash()));
}

force_set_pallet_state {
let set_id = 100;
let authorities = accounts(T::BridgedChain::MAX_AUTHORITIES_COUNT as u16)
.iter()
.map(|id| (AuthorityId::from(*id), 1))
.collect::<Vec<_>>();
let (header, _) = prepare_benchmark_data::<T, I>(1, 1);
let expected_hash = header.hash();
}: force_set_pallet_state(RawOrigin::Root, set_id, authorities, Box::new(header))
verify {
assert_eq!(<BestFinalized<T, I>>::get().unwrap().1, expected_hash);
assert_eq!(<CurrentAuthoritySet<T, I>>::get().set_id, set_id);
}

impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::TestRuntime)
}
178 changes: 160 additions & 18 deletions bridges/modules/grandpa/src/lib.rs
@@ -44,7 +44,7 @@ use bp_header_chain::{
};
use bp_runtime::{BlockNumberOf, HashOf, HasherOf, HeaderId, HeaderOf, OwnedBridgeModule};
use frame_support::{dispatch::PostDispatchInfo, ensure, DefaultNoBound};
use sp_consensus_grandpa::SetId;
use sp_consensus_grandpa::{AuthorityList, SetId};
use sp_runtime::{
traits::{Header as HeaderT, Zero},
SaturatedConversion,
@@ -360,6 +360,42 @@ pub mod pallet {

Ok(PostDispatchInfo { actual_weight: Some(actual_weight), pays_fee })
}

/// Set current authorities set and best finalized bridged header to given values
/// (almost) without any checks. This call can fail only if:
///
/// - the call origin is not a root or a pallet owner;
///
/// - there are too many authorities in the new set.
///
/// No other checks are made. Previously imported headers stay in the storage and
/// are still accessible after the call.
#[pallet::call_index(5)]
#[pallet::weight(T::WeightInfo::force_set_pallet_state())]
pub fn force_set_pallet_state(
origin: OriginFor<T>,
new_current_set_id: SetId,
new_authorities: AuthorityList,
new_best_header: Box<BridgedHeader<T, I>>,
) -> DispatchResult {
Self::ensure_owner_or_root(origin)?;

// save new authorities set. It only fails if there are too many authorities
// in the new set
save_authorities_set::<T, I>(
CurrentAuthoritySet::<T, I>::get().set_id,
new_current_set_id,
new_authorities,
)?;

// save new best header. It may be older than the best header that is already
// known to the pallet - it changes nothing (except for the fact that previously
// imported headers may still be used to prove something)
let new_best_header_hash = new_best_header.hash();
insert_header::<T, I>(*new_best_header, new_best_header_hash);

Ok(())
}
}

/// Number of free header submissions that we may yet accept in the current block.
@@ -592,33 +628,45 @@ pub mod pallet {
// GRANDPA only includes a `delay` for forced changes, so this isn't valid.
ensure!(change.delay == Zero::zero(), <Error<T, I>>::UnsupportedScheduledChange);

// TODO [#788]: Stop manually increasing the `set_id` here.
let next_authorities = StoredAuthoritySet::<T, I> {
authorities: change
.next_authorities
.try_into()
.map_err(|_| Error::<T, I>::TooManyAuthoritiesInSet)?,
set_id: current_set_id + 1,
};

// Since our header schedules a change and we know the delay is 0, it must also enact
// the change.
<CurrentAuthoritySet<T, I>>::put(&next_authorities);

log::info!(
target: LOG_TARGET,
"Transitioned from authority set {} to {}! New authorities are: {:?}",
// TODO [#788]: Stop manually increasing the `set_id` here.
return save_authorities_set::<T, I>(
current_set_id,
current_set_id + 1,
next_authorities,
change.next_authorities,
);

return Ok(Some(next_authorities.into()))
};

Ok(None)
}

/// Save new authorities set.
pub(crate) fn save_authorities_set<T: Config<I>, I: 'static>(
old_current_set_id: SetId,
new_current_set_id: SetId,
new_authorities: AuthorityList,
) -> Result<Option<AuthoritySet>, DispatchError> {
let next_authorities = StoredAuthoritySet::<T, I> {
authorities: new_authorities
.try_into()
.map_err(|_| Error::<T, I>::TooManyAuthoritiesInSet)?,
set_id: new_current_set_id,
};

<CurrentAuthoritySet<T, I>>::put(&next_authorities);

log::info!(
target: LOG_TARGET,
"Transitioned from authority set {} to {}! New authorities are: {:?}",
old_current_set_id,
new_current_set_id,
next_authorities,
);

Ok(Some(next_authorities.into()))
}

/// Verify a GRANDPA justification (finality proof) for a given header.
///
/// Will use the GRANDPA current authorities known to the pallet.
@@ -1700,4 +1748,98 @@ mod tests {
assert_eq!(FreeHeadersRemaining::<TestRuntime, ()>::get(), Some(0));
})
}

#[test]
fn force_set_pallet_state_works() {
run_test(|| {
let header25 = test_header(25);
let header50 = test_header(50);
let ok_new_set_id = 100;
let ok_new_authorities = authority_list();
let bad_new_set_id = 100;
let bad_new_authorities: Vec<_> = std::iter::repeat((ALICE.into(), 1))
.take(MAX_BRIDGED_AUTHORITIES as usize + 1)
.collect();

// initialize and import several headers
initialize_substrate_bridge();
assert_ok!(submit_finality_proof(30));

// wrong origin => error
assert_noop!(
Pallet::<TestRuntime>::force_set_pallet_state(
RuntimeOrigin::signed(1),
ok_new_set_id,
ok_new_authorities.clone(),
Box::new(header50.clone()),
),
DispatchError::BadOrigin,
);

// too many authorities in the set => error
assert_noop!(
Pallet::<TestRuntime>::force_set_pallet_state(
RuntimeOrigin::root(),
bad_new_set_id,
bad_new_authorities.clone(),
Box::new(header50.clone()),
),
Error::<TestRuntime>::TooManyAuthoritiesInSet,
);

// force import header 50 => ok
assert_ok!(Pallet::<TestRuntime>::force_set_pallet_state(
RuntimeOrigin::root(),
ok_new_set_id,
ok_new_authorities.clone(),
Box::new(header50.clone()),
),);

// force import header 25 after 50 => ok
assert_ok!(Pallet::<TestRuntime>::force_set_pallet_state(
RuntimeOrigin::root(),
ok_new_set_id,
ok_new_authorities.clone(),
Box::new(header25.clone()),
),);

// we may import better headers
assert_noop!(submit_finality_proof(20), Error::<TestRuntime>::OldHeader);
assert_ok!(submit_finality_proof_with_set_id(26, ok_new_set_id));

// we can even reimport header #50. It **will cause** some issues during pruning
// (see below)
assert_ok!(submit_finality_proof_with_set_id(50, ok_new_set_id));

// and all headers are available. Even though there are 4 headers, the ring
buffer thinks that there are 5, because we've imported header #50 twice
assert!(GrandpaChainHeaders::<TestRuntime, ()>::finalized_header_state_root(
test_header(30).hash()
)
.is_some());
assert!(GrandpaChainHeaders::<TestRuntime, ()>::finalized_header_state_root(
test_header(50).hash()
)
.is_some());
assert!(GrandpaChainHeaders::<TestRuntime, ()>::finalized_header_state_root(
test_header(25).hash()
)
.is_some());
assert!(GrandpaChainHeaders::<TestRuntime, ()>::finalized_header_state_root(
test_header(26).hash()
)
.is_some());

// next header import will prune header 30
assert_ok!(submit_finality_proof_with_set_id(70, ok_new_set_id));
// next header import will prune header 50
assert_ok!(submit_finality_proof_with_set_id(80, ok_new_set_id));
// next header import will prune header 25
assert_ok!(submit_finality_proof_with_set_id(90, ok_new_set_id));
// next header import will prune header 26
assert_ok!(submit_finality_proof_with_set_id(100, ok_new_set_id));
// next header import will prune header 50 again. But it is fine
assert_ok!(submit_finality_proof_with_set_id(110, ok_new_set_id));
});
}
}
49 changes: 49 additions & 0 deletions bridges/modules/grandpa/src/weights.rs
@@ -51,6 +51,7 @@ use sp_std::marker::PhantomData;
/// Weight functions needed for pallet_bridge_grandpa.
pub trait WeightInfo {
fn submit_finality_proof(p: u32, v: u32) -> Weight;
fn force_set_pallet_state() -> Weight;
}

/// Weights for `pallet_bridge_grandpa` that are generated using one of the Bridge testnets.
@@ -109,6 +110,30 @@ impl<T: frame_system::Config> WeightInfo for BridgeWeight<T> {
.saturating_add(T::DbWeight::get().reads(6_u64))
.saturating_add(T::DbWeight::get().writes(6_u64))
}

/// Storage: `BridgeWestendGrandpa::CurrentAuthoritySet` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::CurrentAuthoritySet` (`max_values`: Some(1), `max_size`:
/// Some(50250), added: 50745, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::ImportedHashesPointer` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::ImportedHashesPointer` (`max_values`: Some(1), `max_size`:
/// Some(4), added: 499, mode: `MaxEncodedLen`) Storage: `BridgeWestendGrandpa::ImportedHashes`
/// (r:1 w:1) Proof: `BridgeWestendGrandpa::ImportedHashes` (`max_values`: Some(1024),
/// `max_size`: Some(36), added: 1521, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::BestFinalized` (r:0 w:1)
/// Proof: `BridgeWestendGrandpa::BestFinalized` (`max_values`: Some(1), `max_size`: Some(36),
/// added: 531, mode: `MaxEncodedLen`) Storage: `BridgeWestendGrandpa::ImportedHeaders` (r:0
/// w:2) Proof: `BridgeWestendGrandpa::ImportedHeaders` (`max_values`: Some(1024), `max_size`:
/// Some(68), added: 1553, mode: `MaxEncodedLen`)
fn force_set_pallet_state() -> Weight {
// Proof Size summary in bytes:
// Measured: `452`
// Estimated: `51735`
// Minimum execution time: 62_232_000 picoseconds.
Weight::from_parts(78_755_000, 0)
.saturating_add(Weight::from_parts(0, 51735))
.saturating_add(RocksDbWeight::get().reads(3))
.saturating_add(RocksDbWeight::get().writes(6))
}
}

// For backwards compatibility and tests
@@ -164,4 +189,28 @@ impl WeightInfo for () {
.saturating_add(RocksDbWeight::get().reads(6_u64))
.saturating_add(RocksDbWeight::get().writes(6_u64))
}

/// Storage: `BridgeWestendGrandpa::CurrentAuthoritySet` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::CurrentAuthoritySet` (`max_values`: Some(1), `max_size`:
/// Some(50250), added: 50745, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::ImportedHashesPointer` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::ImportedHashesPointer` (`max_values`: Some(1), `max_size`:
/// Some(4), added: 499, mode: `MaxEncodedLen`) Storage: `BridgeWestendGrandpa::ImportedHashes`
/// (r:1 w:1) Proof: `BridgeWestendGrandpa::ImportedHashes` (`max_values`: Some(1024),
/// `max_size`: Some(36), added: 1521, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::BestFinalized` (r:0 w:1)
/// Proof: `BridgeWestendGrandpa::BestFinalized` (`max_values`: Some(1), `max_size`: Some(36),
/// added: 531, mode: `MaxEncodedLen`) Storage: `BridgeWestendGrandpa::ImportedHeaders` (r:0
/// w:2) Proof: `BridgeWestendGrandpa::ImportedHeaders` (`max_values`: Some(1024), `max_size`:
/// Some(68), added: 1553, mode: `MaxEncodedLen`)
fn force_set_pallet_state() -> Weight {
// Proof Size summary in bytes:
// Measured: `452`
// Estimated: `51735`
// Minimum execution time: 62_232_000 picoseconds.
Weight::from_parts(78_755_000, 0)
.saturating_add(Weight::from_parts(0, 51735))
.saturating_add(RocksDbWeight::get().reads(3))
.saturating_add(RocksDbWeight::get().writes(6))
}
}
@@ -16,10 +16,10 @@

//! Autogenerated weights for `pallet_bridge_grandpa`
//!
//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev
//! DATE: 2023-12-12, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]`
//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 32.0.0
//! DATE: 2024-05-17, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]`
//! WORST CASE MAP SIZE: `1000000`
//! HOSTNAME: `runner-itmxxexx-project-674-concurrent-0`, CPU: `Intel(R) Xeon(R) CPU @ 2.60GHz`
//! HOSTNAME: `runner-unxyhko3-project-674-concurrent-0`, CPU: `Intel(R) Xeon(R) CPU @ 2.60GHz`
//! WASM-EXECUTION: `Compiled`, CHAIN: `Some("bridge-hub-rococo-dev")`, DB CACHE: 1024

// Executed Command:
@@ -48,12 +48,14 @@ use core::marker::PhantomData;
/// Weight functions for `pallet_bridge_grandpa`.
pub struct WeightInfo<T>(PhantomData<T>);
impl<T: frame_system::Config> pallet_bridge_grandpa::WeightInfo for WeightInfo<T> {
/// Storage: `BridgeWestendGrandpa::CurrentAuthoritySet` (r:1 w:0)
/// Proof: `BridgeWestendGrandpa::CurrentAuthoritySet` (`max_values`: Some(1), `max_size`: Some(50250), added: 50745, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::PalletOperatingMode` (r:1 w:0)
/// Proof: `BridgeWestendGrandpa::PalletOperatingMode` (`max_values`: Some(1), `max_size`: Some(1), added: 496, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::BestFinalized` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::BestFinalized` (`max_values`: Some(1), `max_size`: Some(36), added: 531, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::CurrentAuthoritySet` (r:1 w:0)
/// Proof: `BridgeWestendGrandpa::CurrentAuthoritySet` (`max_values`: Some(1), `max_size`: Some(50250), added: 50745, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::FreeHeadersRemaining` (r:1 w:0)
/// Proof: `BridgeWestendGrandpa::FreeHeadersRemaining` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::ImportedHashesPointer` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::ImportedHashesPointer` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::ImportedHashes` (r:1 w:1)
@@ -62,18 +64,36 @@ impl<T: frame_system::Config> pallet_bridge_grandpa::WeightInfo for WeightInfo<T
/// Proof: `BridgeWestendGrandpa::ImportedHeaders` (`max_values`: Some(1024), `max_size`: Some(68), added: 1553, mode: `MaxEncodedLen`)
/// The range of component `p` is `[1, 838]`.
/// The range of component `v` is `[50, 100]`.
fn submit_finality_proof(p: u32, v: u32, ) -> Weight {
fn submit_finality_proof(p: u32, _v: u32, ) -> Weight {
// Proof Size summary in bytes:
// Measured: `335 + p * (60 ±0)`
// Measured: `440 + p * (60 ±0)`
// Estimated: `51735`
// Minimum execution time: 310_124_000 picoseconds.
Weight::from_parts(18_294_977, 0)
// Minimum execution time: 306_046_000 picoseconds.
Weight::from_parts(384_361_000, 0)
.saturating_add(Weight::from_parts(0, 51735))
// Standard Error: 5_665
.saturating_add(Weight::from_parts(55_380_719, 0).saturating_mul(p.into()))
// Standard Error: 94_494
.saturating_add(Weight::from_parts(2_765_959, 0).saturating_mul(v.into()))
.saturating_add(T::DbWeight::get().reads(5))
// Standard Error: 14_298
.saturating_add(Weight::from_parts(49_045_748, 0).saturating_mul(p.into()))
.saturating_add(T::DbWeight::get().reads(6))
.saturating_add(T::DbWeight::get().writes(5))
}
/// Storage: `BridgeWestendGrandpa::CurrentAuthoritySet` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::CurrentAuthoritySet` (`max_values`: Some(1), `max_size`: Some(50250), added: 50745, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::ImportedHashesPointer` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::ImportedHashesPointer` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::ImportedHashes` (r:1 w:1)
/// Proof: `BridgeWestendGrandpa::ImportedHashes` (`max_values`: Some(1024), `max_size`: Some(36), added: 1521, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::BestFinalized` (r:0 w:1)
/// Proof: `BridgeWestendGrandpa::BestFinalized` (`max_values`: Some(1), `max_size`: Some(36), added: 531, mode: `MaxEncodedLen`)
/// Storage: `BridgeWestendGrandpa::ImportedHeaders` (r:0 w:2)
/// Proof: `BridgeWestendGrandpa::ImportedHeaders` (`max_values`: Some(1024), `max_size`: Some(68), added: 1553, mode: `MaxEncodedLen`)
fn force_set_pallet_state() -> Weight {
// Proof Size summary in bytes:
// Measured: `452`
// Estimated: `51735`
// Minimum execution time: 94_965_000 picoseconds.
Weight::from_parts(113_633_000, 0)
.saturating_add(Weight::from_parts(0, 51735))
.saturating_add(T::DbWeight::get().reads(3))
.saturating_add(T::DbWeight::get().writes(6))
}
}