diff --git a/packages/api/package.json b/packages/api/package.json index f87de28f088..0d41923cd20 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -70,7 +70,7 @@ }, "dependencies": { "@chainsafe/persistent-merkle-tree": "^0.5.0", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/config": "^1.11.1", "@lodestar/params": "^1.11.1", "@lodestar/types": "^1.11.1", diff --git a/packages/api/src/beacon/routes/lodestar.ts b/packages/api/src/beacon/routes/lodestar.ts index 8979f31a14c..aace9baa0c2 100644 --- a/packages/api/src/beacon/routes/lodestar.ts +++ b/packages/api/src/beacon/routes/lodestar.ts @@ -68,6 +68,7 @@ export type StateCacheItem = { /** Unix timestamp (ms) of the last read */ lastRead: number; checkpointState: boolean; + persistentKey?: string; }; export type LodestarNodePeer = NodePeer & { diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index 44759696641..2be7a5d692a 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -104,7 +104,7 @@ "@chainsafe/libp2p-noise": "^13.0.0", "@chainsafe/persistent-merkle-tree": "^0.5.0", "@chainsafe/prometheus-gc-stats": "^1.0.0", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@chainsafe/threads": "^1.11.1", "@ethersproject/abi": "^5.7.0", "@fastify/bearer-auth": "^9.0.0", diff --git a/packages/beacon-node/src/chain/archiver/archiveStates.ts b/packages/beacon-node/src/chain/archiver/archiveStates.ts index 98b083b0513..92942686bc5 100644 --- a/packages/beacon-node/src/chain/archiver/archiveStates.ts +++ b/packages/beacon-node/src/chain/archiver/archiveStates.ts @@ -5,6 +5,7 @@ import {computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-trans import {CheckpointWithHex} from "@lodestar/fork-choice"; import {IBeaconDb} from "../../db/index.js"; import {IStateRegenerator} from "../regen/interface.js"; +import {getStateSlotFromBytes} from "../../util/multifork.js"; /** * Minimum 
number of epochs between single temp archived states @@ -83,13 +84,22 @@ export class StatesArchiver { * Only the new finalized state is stored to disk */ async archiveState(finalized: CheckpointWithHex): Promise { - const finalizedState = this.regen.getCheckpointStateSync(finalized); - if (!finalizedState) { - throw Error("No state in cache for finalized checkpoint state epoch #" + finalized.epoch); + // the finalized state could be loaded from disk + const finalizedStateOrBytes = await this.regen.getCheckpointStateOrBytes(finalized); + const {rootHex} = finalized; + if (!finalizedStateOrBytes) { + throw Error(`No state in cache for finalized checkpoint state epoch #${finalized.epoch} root ${rootHex}`); + } + if (finalizedStateOrBytes instanceof Uint8Array) { + const slot = getStateSlotFromBytes(finalizedStateOrBytes); + await this.db.stateArchive.putBinary(slot, finalizedStateOrBytes); + this.logger.verbose("Archived finalized state bytes", {finalizedEpoch: finalized.epoch, slot, root: rootHex}); + } else { + // state + await this.db.stateArchive.put(finalizedStateOrBytes.slot, finalizedStateOrBytes); + this.logger.verbose("Archived finalized state", {epoch: finalized.epoch, root: rootHex}); } - await this.db.stateArchive.put(finalizedState.slot, finalizedState); // don't delete states before the finalized state, auto-prune will take care of it - this.logger.verbose("Archived finalized state", {finalizedEpoch: finalized.epoch}); } } diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 55d798df8e3..17ce62c12a2 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -15,7 +15,7 @@ import {ZERO_HASH_HEX} from "../../constants/index.js"; import {toCheckpointHex} from "../stateCache/index.js"; import {isOptimisticBlock} from "../../util/forkChoice.js"; import {isQueueErrorAborted} from "../../util/queue/index.js"; -import 
{ChainEvent, ReorgEventData} from "../emitter.js"; +import {ReorgEventData} from "../emitter.js"; import {REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC} from "../reprocess.js"; import type {BeaconChain} from "../chain.js"; import {FullyVerifiedBlock, ImportBlockOpts, AttestationImportOpt} from "./types.js"; @@ -62,6 +62,7 @@ export async function importBlock( const blockRootHex = toHexString(blockRoot); const currentEpoch = computeEpochAtSlot(this.forkChoice.getTime()); const blockEpoch = computeEpochAtSlot(block.message.slot); + const parentEpoch = computeEpochAtSlot(parentBlockSlot); const prevFinalizedEpoch = this.forkChoice.getFinalizedCheckpoint().epoch; const blockDelaySec = (fullyVerifiedBlock.seenTimestampSec - postState.genesisTime) % this.config.SECONDS_PER_SLOT; @@ -202,16 +203,15 @@ export async function importBlock( } } - // 5. Compute head. If new head, immediately stateCache.setHeadState() + // 5. Compute head, always add to state cache so that it'll not be pruned soon const oldHead = this.forkChoice.getHead(); const newHead = this.recomputeForkChoiceHead(); const currFinalizedEpoch = this.forkChoice.getFinalizedCheckpoint().epoch; + // always set head state so it'll never be pruned from state cache + this.regen.updateHeadState(newHead.stateRoot, postState); if (newHead.blockRoot !== oldHead.blockRoot) { - // Set head state as strong reference - this.regen.updateHeadState(newHead.stateRoot, postState); - this.emitter.emit(routes.events.EventType.head, { block: newHead.blockRoot, epochTransition: computeStartSlotAtEpoch(computeEpochAtSlot(newHead.slot)) === newHead.slot, @@ -331,12 +331,20 @@ export async function importBlock( this.logger.verbose("After importBlock caching postState without SSZ cache", {slot: postState.slot}); } - if (block.message.slot % SLOTS_PER_EPOCH === 0) { - // Cache state to preserve epoch transition work + if (parentEpoch < blockEpoch) { + // current epoch and previous epoch are likely cached in previous states + 
this.shufflingCache.processState(postState, postState.epochCtx.nextShuffling.epoch); + this.logger.verbose("Processed shuffling for next epoch", {parentEpoch, blockEpoch, slot: block.message.slot}); + + // This is the real check point state per spec because the root is in current epoch + // it's important to add this to cache, when chain is finalized we'll query this state later const checkpointState = postState; const cp = getCheckpointFromState(checkpointState); - this.regen.addCheckpointState(cp, checkpointState); - this.emitter.emit(ChainEvent.checkpoint, cp, checkpointState); + // add Current Root Checkpoint State to the checkpoint state cache + // this could be the justified/finalized checkpoint state later according to https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.2/specs/phase0/beacon-chain.md + if (block.message.slot % SLOTS_PER_EPOCH === 0) { + this.regen.addCheckpointState(cp, checkpointState); + } // Note: in-lined code from previos handler of ChainEvent.checkpoint this.logger.verbose("Checkpoint processed", toCheckpointHex(cp)); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 694cb549595..ce914db1190 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -39,7 +39,7 @@ import {IExecutionEngine, IExecutionBuilder} from "../execution/index.js"; import {Clock, ClockEvent, IClock} from "../util/clock.js"; import {ensureDir, writeIfNotExist} from "../util/file.js"; import {isOptimisticBlock} from "../util/forkChoice.js"; -import {CheckpointStateCache, StateContextCache} from "./stateCache/index.js"; +import {CHECKPOINT_STATES_FOLDER, PersistentCheckpointStateCache, LRUBlockStateCache} from "./stateCache/index.js"; import {BlockProcessor, ImportBlockOpts} from "./blocks/index.js"; import {ChainEventEmitter, ChainEvent} from "./emitter.js"; import {IBeaconChain, ProposerPreparationData, BlockHash, StateGetOpts} from "./interface.js"; @@ -75,6 
+75,11 @@ import {BlockAttributes, produceBlockBody} from "./produceBlock/produceBlockBody import {computeNewStateRoot} from "./produceBlock/computeNewStateRoot.js"; import {BlockInput} from "./blocks/types.js"; import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js"; +import {ShufflingCache} from "./shufflingCache.js"; +import {MemoryCheckpointStateCache} from "./stateCache/memoryCheckpointsCache.js"; +import {FilePersistentApis} from "./stateCache/persistent/file.js"; +import {DbPersistentApis} from "./stateCache/persistent/db.js"; +import {StateContextCache} from "./stateCache/stateContextCache.js"; /** * Arbitrary constants, blobs should be consumed immediately in the same slot they are produced. @@ -130,6 +135,7 @@ export class BeaconChain implements IBeaconChain { readonly beaconProposerCache: BeaconProposerCache; readonly checkpointBalancesCache: CheckpointBalancesCache; + readonly shufflingCache: ShufflingCache; // TODO DENEB: Prune data structure every time period, for both old entries /** Map keyed by executionPayload.blockHash of the block for those blobs */ readonly producedBlobSidecarsCache = new Map(); @@ -211,6 +217,7 @@ export class BeaconChain implements IBeaconChain { this.beaconProposerCache = new BeaconProposerCache(opts); this.checkpointBalancesCache = new CheckpointBalancesCache(); + this.shufflingCache = new ShufflingCache(metrics); // Restore state caches // anchorState may already by a CachedBeaconState. 
If so, don't create the cache again, since deserializing all @@ -225,16 +232,37 @@ export class BeaconChain implements IBeaconChain { pubkey2index: new PubkeyIndexMap(), index2pubkey: [], }); + this.shufflingCache.processState(cachedState, cachedState.epochCtx.previousShuffling.epoch); + this.shufflingCache.processState(cachedState, cachedState.epochCtx.currentShuffling.epoch); + this.shufflingCache.processState(cachedState, cachedState.epochCtx.nextShuffling.epoch); // Persist single global instance of state caches this.pubkey2index = cachedState.epochCtx.pubkey2index; this.index2pubkey = cachedState.epochCtx.index2pubkey; - const stateCache = new StateContextCache({metrics}); - const checkpointStateCache = new CheckpointStateCache({metrics}); + const stateCache = this.opts.nHistoricalStates + ? new LRUBlockStateCache(this.opts, {metrics}) + : new StateContextCache({metrics}); + const persistentApis = this.opts.persistCheckpointStatesToFile + ? new FilePersistentApis(CHECKPOINT_STATES_FOLDER) + : new DbPersistentApis(this.db); + const checkpointStateCache = this.opts.nHistoricalStates + ? new PersistentCheckpointStateCache( + { + metrics, + logger, + clock, + shufflingCache: this.shufflingCache, + getHeadState: this.getHeadState.bind(this), + persistentApis, + }, + this.opts + ) + : new MemoryCheckpointStateCache({metrics}); const {checkpoint} = computeAnchorCheckpoint(config, anchorState); stateCache.add(cachedState); + // TODO: remove once we go with n-historical states stateCache.setHeadState(cachedState); checkpointStateCache.add(checkpoint, cachedState); @@ -322,6 +350,7 @@ export class BeaconChain implements IBeaconChain { /** Populate in-memory caches with persisted data. 
Call at least once on startup */ async loadFromDisk(): Promise { + await this.regen.init(); await this.opPool.fromPersisted(this.db); } @@ -841,15 +870,19 @@ export class BeaconChain implements IBeaconChain { this.logger.verbose("Fork choice justified", {epoch: cp.epoch, root: cp.rootHex}); } - private onForkChoiceFinalized(this: BeaconChain, cp: CheckpointWithHex): void { + private async onForkChoiceFinalized(this: BeaconChain, cp: CheckpointWithHex): Promise { this.logger.verbose("Fork choice finalized", {epoch: cp.epoch, root: cp.rootHex}); this.seenBlockProposers.prune(computeStartSlotAtEpoch(cp.epoch)); // TODO: Improve using regen here const headState = this.regen.getStateSync(this.forkChoice.getHead().stateRoot); - const finalizedState = this.regen.getCheckpointStateSync(cp); + // the finalized state could be from disk + const finalizedStateOrBytes = await this.regen.getCheckpointStateOrBytes(cp); + if (!finalizedStateOrBytes) { + throw Error("No state in cache for finalized checkpoint state epoch #" + cp.epoch); + } if (headState) { - this.opPool.pruneAll(headState, finalizedState); + this.opPool.pruneAll(headState, finalizedStateOrBytes); } } diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index 78fbf2c5a3f..d379a7f7379 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -36,6 +36,7 @@ import {CheckpointBalancesCache} from "./balancesCache.js"; import {IChainOptions} from "./options.js"; import {AssembledBlockType, BlockAttributes, BlockType} from "./produceBlock/produceBlockBody.js"; import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js"; +import {ShufflingCache} from "./shufflingCache.js"; export {BlockType, AssembledBlockType}; export {ProposerPreparationData}; @@ -93,6 +94,7 @@ export interface IBeaconChain { readonly beaconProposerCache: BeaconProposerCache; readonly checkpointBalancesCache: CheckpointBalancesCache; + 
readonly shufflingCache: ShufflingCache; readonly producedBlobSidecarsCache: Map; readonly producedBlindedBlobSidecarsCache: Map; readonly producedBlockRoot: Set; diff --git a/packages/beacon-node/src/chain/opPools/opPool.ts b/packages/beacon-node/src/chain/opPools/opPool.ts index b2a49ae4c07..ef30add284a 100644 --- a/packages/beacon-node/src/chain/opPools/opPool.ts +++ b/packages/beacon-node/src/chain/opPools/opPool.ts @@ -14,8 +14,13 @@ import { BLS_WITHDRAWAL_PREFIX, } from "@lodestar/params"; import {Epoch, phase0, capella, ssz, ValidatorIndex} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; import {IBeaconDb} from "../../db/index.js"; import {SignedBLSToExecutionChangeVersioned} from "../../util/types.js"; +import { + getValidatorsBytesFromStateBytes, + getWithdrawalCredentialFirstByteFromValidatorBytes, +} from "../../util/sszBytes.js"; import {isValidBlsToExecutionChangeForBlockInclusion} from "./utils.js"; type HexRoot = string; @@ -270,11 +275,11 @@ export class OpPool { /** * Prune all types of transactions given the latest head state */ - pruneAll(headState: CachedBeaconStateAllForks, finalizedState: CachedBeaconStateAllForks | null): void { + pruneAll(headState: CachedBeaconStateAllForks, finalizedState: CachedBeaconStateAllForks | Uint8Array): void { this.pruneAttesterSlashings(headState); this.pruneProposerSlashings(headState); this.pruneVoluntaryExits(headState); - this.pruneBlsToExecutionChanges(headState, finalizedState); + this.pruneBlsToExecutionChanges(headState.config, finalizedState); } /** @@ -344,17 +349,34 @@ export class OpPool { * credentials */ private pruneBlsToExecutionChanges( - headState: CachedBeaconStateAllForks, - finalizedState: CachedBeaconStateAllForks | null + config: ChainForkConfig, + finalizedStateOrBytes: CachedBeaconStateAllForks | Uint8Array ): void { + const validatorBytes = + finalizedStateOrBytes instanceof Uint8Array + ? 
getValidatorsBytesFromStateBytes(config, finalizedStateOrBytes) + : null; + for (const [key, blsToExecutionChange] of this.blsToExecutionChanges.entries()) { - // TODO CAPELLA: We need the finalizedState to safely prune BlsToExecutionChanges. Finalized state may not be - // available in the cache, so it can be null. Once there's a head only prunning strategy, change - if (finalizedState !== null) { - const validator = finalizedState.validators.getReadonly(blsToExecutionChange.data.message.validatorIndex); - if (validator.withdrawalCredentials[0] !== BLS_WITHDRAWAL_PREFIX) { - this.blsToExecutionChanges.delete(key); + // there are at least finalized state bytes + let withDrawableCredentialFirstByte: number | null; + const validatorIndex = blsToExecutionChange.data.message.validatorIndex; + if (finalizedStateOrBytes instanceof Uint8Array) { + if (!validatorBytes) { + throw Error( + "Not able to extract validator bytes from finalized state bytes with length " + finalizedStateOrBytes.length + ); } + withDrawableCredentialFirstByte = getWithdrawalCredentialFirstByteFromValidatorBytes( + validatorBytes, + validatorIndex + ); + } else { + const validator = finalizedStateOrBytes.validators.getReadonly(validatorIndex); + withDrawableCredentialFirstByte = validator.withdrawalCredentials[0]; + } + if (withDrawableCredentialFirstByte !== BLS_WITHDRAWAL_PREFIX) { + this.blsToExecutionChanges.delete(key); } } } diff --git a/packages/beacon-node/src/chain/options.ts b/packages/beacon-node/src/chain/options.ts index 9f826d1a240..26d6c01d52b 100644 --- a/packages/beacon-node/src/chain/options.ts +++ b/packages/beacon-node/src/chain/options.ts @@ -3,12 +3,16 @@ import {defaultOptions as defaultValidatorOptions} from "@lodestar/validator"; import {ArchiverOpts} from "./archiver/index.js"; import {ForkChoiceOpts} from "./forkChoice/index.js"; import {LightClientServerOpts} from "./lightClient/index.js"; +import {PersistentCheckpointStateCacheOpts} from "./stateCache/types.js"; +import 
{LRUBlockStateCacheOpts} from "./stateCache/lruBlockStateCache.js"; export type IChainOptions = BlockProcessOpts & PoolOpts & SeenCacheOpts & ForkChoiceOpts & ArchiverOpts & + LRUBlockStateCacheOpts & + PersistentCheckpointStateCacheOpts & LightClientServerOpts & { blsVerifyAllMainThread?: boolean; blsVerifyAllMultiThread?: boolean; @@ -27,6 +31,9 @@ export type IChainOptions = BlockProcessOpts & trustedSetup?: string; broadcastValidationStrictness?: string; minSameMessageSignatureSetsToBatch: number; + nHistoricalStates?: boolean; + /** by default persist checkpoint state to db */ + persistCheckpointStatesToFile?: boolean; }; export type BlockProcessOpts = { @@ -88,4 +95,14 @@ export const defaultChainOptions: IChainOptions = { // batching too much may block the I/O thread so if useWorker=false, suggest this value to be 32 // since this batch attestation work is designed to work with useWorker=true, make this the lowest value minSameMessageSignatureSetsToBatch: 2, + // TODO: change to false, leaving here to ease testing + nHistoricalStates: true, + // by default, persist checkpoint states to db + persistCheckpointStatesToFile: false, + + // since Sep 2023, only cache up to 32 states by default. If a big reorg happens it'll load checkpoint state from disk and regen from there. 
+ // TODO: change to 128 which is the old StateCache config, only change back to 32 when we enable n-historical state, leaving here to ease testing + maxStates: 32, + // only used when persistentCheckpointStateCache = true + maxEpochsInMemory: 2, }; diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts index 1091fd716b6..42003052da0 100644 --- a/packages/beacon-node/src/chain/prepareNextSlot.ts +++ b/packages/beacon-node/src/chain/prepareNextSlot.ts @@ -83,7 +83,8 @@ headSlot, clockSlot, }); - + // It's important to still do this to get through Holesky unfinality time of low resource nodes + await this.prunePerSlot(clockSlot); return; } @@ -173,6 +174,9 @@ this.chain.emitter.emit(routes.events.EventType.payloadAttributes, {data, version: fork}); } } + + // do this after all as it's the lowest priority task + await this.prunePerSlot(clockSlot); } catch (e) { if (!isErrorAborted(e) && !isQueueErrorAborted(e)) { this.metrics?.precomputeNextEpochTransition.count.inc({result: "error"}, 1); @@ -180,4 +184,18 @@ } } }; + + /** + * Pruning at the last 1/3 slot of first slot of epoch is the safest time because all epoch transitions already use the checkpoint states cached + * one downside of this is when `inMemoryEpochs = 0` and gossip block hasn't come yet then we have to reload state we added 2/3 slot ago + * However, it's not likely `inMemoryEpochs` is configured as 0, and this scenario rarely happens + * since we only use `inMemoryEpochs = 0` for testing, if it happens it's a good thing because it helps us test the reload flow + */ + private prunePerSlot = async (clockSlot: Slot): Promise => { + // a Contabo VPS can have 10-12 holesky epoch transitions per epoch when syncing, stronger node may have more + // it's better to prune at the last 1/3 of every slot in order not to cache a lot of 
checkpoint states + // at synced time, it's likely we only prune at the 1st slot of epoch, all other prunes are no-op + const pruneCount = await this.chain.regen.pruneCheckpointStateCache(); + this.logger.verbose("Pruned checkpoint state cache", {clockSlot, pruneCount}); + }; } diff --git a/packages/beacon-node/src/chain/regen/interface.ts b/packages/beacon-node/src/chain/regen/interface.ts index e7be64d0eec..d81b18e917e 100644 --- a/packages/beacon-node/src/chain/regen/interface.ts +++ b/packages/beacon-node/src/chain/regen/interface.ts @@ -32,15 +32,19 @@ export type StateCloneOpts = { }; export interface IStateRegenerator extends IStateRegeneratorInternal { + init(): Promise; dropCache(): void; dumpCacheSummary(): routes.lodestar.StateCacheItem[]; getStateSync(stateRoot: RootHex): CachedBeaconStateAllForks | null; + getCheckpointStateOrBytes(cp: CheckpointHex): Promise; getCheckpointStateSync(cp: CheckpointHex): CachedBeaconStateAllForks | null; getClosestHeadState(head: ProtoBlock): CachedBeaconStateAllForks | null; + // TODO: remove once we go with n-historical state cache pruneOnCheckpoint(finalizedEpoch: Epoch, justifiedEpoch: Epoch, headStateRoot: RootHex): void; pruneOnFinalized(finalizedEpoch: Epoch): void; addPostState(postState: CachedBeaconStateAllForks): void; addCheckpointState(cp: phase0.Checkpoint, item: CachedBeaconStateAllForks): void; + pruneCheckpointStateCache(): Promise; updateHeadState(newHeadStateRoot: RootHex, maybeHeadState: CachedBeaconStateAllForks): void; updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; } diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index dd111f14b4d..ad5b230b281 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -4,7 +4,7 @@ import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; import {CachedBeaconStateAllForks, computeEpochAtSlot} from 
"@lodestar/state-transition"; import {Logger} from "@lodestar/utils"; import {routes} from "@lodestar/api"; -import {CheckpointHex, CheckpointStateCache, StateContextCache, toCheckpointHex} from "../stateCache/index.js"; +import {CheckpointHex, CheckpointStateCache, BlockStateCache, toCheckpointHex} from "../stateCache/index.js"; import {Metrics} from "../../metrics/index.js"; import {JobItemQueue} from "../../util/queue/index.js"; import {IStateRegenerator, IStateRegeneratorInternal, RegenCaller, RegenFnName, StateCloneOpts} from "./interface.js"; @@ -34,7 +34,7 @@ export class QueuedStateRegenerator implements IStateRegenerator { private readonly regen: StateRegenerator; private readonly forkChoice: IForkChoice; - private readonly stateCache: StateContextCache; + private readonly stateCache: BlockStateCache; private readonly checkpointStateCache: CheckpointStateCache; private readonly metrics: Metrics | null; private readonly logger: Logger; @@ -53,6 +53,12 @@ export class QueuedStateRegenerator implements IStateRegenerator { this.logger = modules.logger; } + async init(): Promise { + if (this.checkpointStateCache.init) { + return this.checkpointStateCache.init(); + } + } + canAcceptWork(): boolean { return this.jobQueue.jobLen < REGEN_CAN_ACCEPT_WORK_THRESHOLD; } @@ -70,6 +76,10 @@ export class QueuedStateRegenerator implements IStateRegenerator { return this.stateCache.get(stateRoot); } + async getCheckpointStateOrBytes(cp: CheckpointHex): Promise { + return this.checkpointStateCache.getStateOrBytes(cp); + } + getCheckpointStateSync(cp: CheckpointHex): CachedBeaconStateAllForks | null { return this.checkpointStateCache.get(cp); } @@ -78,6 +88,7 @@ export class QueuedStateRegenerator implements IStateRegenerator { return this.checkpointStateCache.getLatest(head.blockRoot, Infinity) || this.stateCache.get(head.stateRoot); } + // TODO: remove this once we go with n-historical state pruneOnCheckpoint(finalizedEpoch: Epoch, justifiedEpoch: Epoch, headStateRoot: 
RootHex): void { this.checkpointStateCache.prune(finalizedEpoch, justifiedEpoch); this.stateCache.prune(headStateRoot); @@ -96,6 +107,10 @@ export class QueuedStateRegenerator implements IStateRegenerator { this.checkpointStateCache.add(cp, item); } + pruneCheckpointStateCache(): Promise { + return this.checkpointStateCache.pruneFromMemory(); + } + updateHeadState(newHeadStateRoot: RootHex, maybeHeadState: CachedBeaconStateAllForks): void { const headState = newHeadStateRoot === toHexString(maybeHeadState.hashTreeRoot()) @@ -103,14 +118,19 @@ export class QueuedStateRegenerator implements IStateRegenerator { : this.stateCache.get(newHeadStateRoot); if (headState) { + // TODO: use add() api instead once we go with n-historical state this.stateCache.setHeadState(headState); } else { // Trigger regen on head change if necessary this.logger.warn("Head state not available, triggering regen", {stateRoot: newHeadStateRoot}); + // it's important to reload state to regen head state here + const shouldReload = true; // head has changed, so the existing cached head state is no longer useful. Set strong reference to null to free // up memory for regen step below. 
During regen, node won't be functional but eventually head will be available + // TODO: remove this once we go with n-historical state this.stateCache.setHeadState(null); - this.regen.getState(newHeadStateRoot, RegenCaller.processBlock).then( + this.regen.getState(newHeadStateRoot, RegenCaller.processBlock, shouldReload).then( + // this moves the headState to the front of the queue so it'll not be pruned right away (headStateRegen) => this.stateCache.setHeadState(headStateRegen), (e) => this.logger.error("Error on head state regen", {}, e) ); diff --git a/packages/beacon-node/src/chain/regen/regen.ts b/packages/beacon-node/src/chain/regen/regen.ts index 0d6bd89d8ce..27fbecfefb1 100644 --- a/packages/beacon-node/src/chain/regen/regen.ts +++ b/packages/beacon-node/src/chain/regen/regen.ts @@ -15,7 +15,7 @@ import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; import {Metrics} from "../../metrics/index.js"; import {IBeaconDb} from "../../db/index.js"; -import {CheckpointStateCache, StateContextCache} from "../stateCache/index.js"; +import {CheckpointStateCache, BlockStateCache} from "../stateCache/index.js"; import {getCheckpointFromState} from "../blocks/utils/checkpoint.js"; import {ChainEvent, ChainEventEmitter} from "../emitter.js"; import {IStateRegeneratorInternal, RegenCaller, StateCloneOpts} from "./interface.js"; @@ -24,7 +24,7 @@ import {RegenError, RegenErrorCode} from "./errors.js"; export type RegenModules = { db: IBeaconDb; forkChoice: IForkChoice; - stateCache: StateContextCache; + stateCache: BlockStateCache; checkpointStateCache: CheckpointStateCache; config: ChainForkConfig; emitter: ChainEventEmitter; @@ -33,6 +33,10 @@ /** * Regenerates states that have already been processed by the fork choice + * Since Sep 2023, we support reloading checkpoint state from disk via shouldReload flag. 
Due to its performance impact + * this flag is only used in this case: + * - getPreState: this is for block processing, this is imporant for long unfinalized chain + * - updateHeadState: rarely happen, but it's important to make sure we always can regen head state */ export class StateRegenerator implements IStateRegeneratorInternal { constructor(private readonly modules: RegenModules) {} @@ -41,6 +45,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { * Get the state to run with `block`. May be: * - If parent is in same epoch -> Exact state at `block.parentRoot` * - If parent is in prev epoch -> State after `block.parentRoot` dialed forward through epoch transition + * - It's imporant to reload state if needed in this flow */ async getPreState( block: allForks.BeaconBlock, @@ -57,6 +62,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { const parentEpoch = computeEpochAtSlot(parentBlock.slot); const blockEpoch = computeEpochAtSlot(block.slot); + const shouldReload = true; // This may save us at least one epoch transition. // If the requested state crosses an epoch boundary @@ -64,11 +70,11 @@ export class StateRegenerator implements IStateRegeneratorInternal { // We may have the checkpoint state with parent root inside the checkpoint state cache // through gossip validation. if (parentEpoch < blockEpoch) { - return this.getCheckpointState({root: block.parentRoot, epoch: blockEpoch}, opts, rCaller); + return this.getCheckpointState({root: block.parentRoot, epoch: blockEpoch}, opts, rCaller, shouldReload); } - // Otherwise, get the state normally. 
- return this.getState(parentBlock.stateRoot, rCaller); + // Otherwise, get the state normally + return this.getState(parentBlock.stateRoot, rCaller, shouldReload); } /** @@ -77,20 +83,23 @@ export class StateRegenerator implements IStateRegeneratorInternal { async getCheckpointState( cp: phase0.Checkpoint, opts: StateCloneOpts, - rCaller: RegenCaller + rCaller: RegenCaller, + shouldReload = false ): Promise { const checkpointStartSlot = computeStartSlotAtEpoch(cp.epoch); - return this.getBlockSlotState(toHexString(cp.root), checkpointStartSlot, opts, rCaller); + return this.getBlockSlotState(toHexString(cp.root), checkpointStartSlot, opts, rCaller, shouldReload); } /** * Get state after block `blockRoot` dialed forward to `slot` + * - shouldReload should be used with care, as it will cause the state to be reloaded from disk */ async getBlockSlotState( blockRoot: RootHex, slot: Slot, opts: StateCloneOpts, - rCaller: RegenCaller + rCaller: RegenCaller, + shouldReload = false ): Promise { const block = this.modules.forkChoice.getBlockHex(blockRoot); if (!block) { @@ -108,7 +117,11 @@ export class StateRegenerator implements IStateRegeneratorInternal { }); } - const latestCheckpointStateCtx = this.modules.checkpointStateCache.getLatest(blockRoot, computeEpochAtSlot(slot)); + const {checkpointStateCache} = this.modules; + const getLatestApi = shouldReload + ? 
checkpointStateCache.getOrReloadLatest.bind(checkpointStateCache) + : checkpointStateCache.getLatest.bind(checkpointStateCache); + const latestCheckpointStateCtx = await getLatestApi(blockRoot, computeEpochAtSlot(slot)); // If a checkpoint state exists with the given checkpoint root, it either is in requested epoch // or needs to have empty slots processed until the requested epoch @@ -119,15 +132,16 @@ export class StateRegenerator implements IStateRegeneratorInternal { // Otherwise, use the fork choice to get the stateRoot from block at the checkpoint root // regenerate that state, // then process empty slots until the requested epoch - const blockStateCtx = await this.getState(block.stateRoot, rCaller); + const blockStateCtx = await this.getState(block.stateRoot, rCaller, shouldReload); return processSlotsByCheckpoint(this.modules, blockStateCtx, slot, opts); } /** * Get state by exact root. If not in cache directly, requires finding the block that references the state from the * forkchoice and replaying blocks to get to it. + * - shouldReload should be used with care, as it will cause the state to be reloaded from disk */ - async getState(stateRoot: RootHex, _rCaller: RegenCaller): Promise { + async getState(stateRoot: RootHex, _rCaller: RegenCaller, shouldReload = false): Promise { // Trivial case, state at stateRoot is already cached const cachedStateCtx = this.modules.stateCache.get(stateRoot); if (cachedStateCtx) { @@ -143,15 +157,17 @@ export class StateRegenerator implements IStateRegeneratorInternal { // gets reversed when replayed const blocksToReplay = [block]; let state: CachedBeaconStateAllForks | null = null; - for (const b of this.modules.forkChoice.iterateAncestorBlocks(block.parentRoot)) { + const {checkpointStateCache} = this.modules; + const getLatestApi = shouldReload + ? 
checkpointStateCache.getOrReloadLatest.bind(checkpointStateCache) + : checkpointStateCache.getLatest.bind(checkpointStateCache); + // iterateAncestorBlocks only returns ancestor blocks, not the block itself + for (const b of this.modules.forkChoice.iterateAncestorBlocks(block.blockRoot)) { state = this.modules.stateCache.get(b.stateRoot); if (state) { break; } - state = this.modules.checkpointStateCache.getLatest( - b.blockRoot, - computeEpochAtSlot(blocksToReplay[blocksToReplay.length - 1].slot - 1) - ); + state = await getLatestApi(b.blockRoot, computeEpochAtSlot(blocksToReplay[blocksToReplay.length - 1].slot - 1)); if (state) { break; } @@ -198,8 +214,10 @@ export class StateRegenerator implements IStateRegeneratorInternal { null ); - // TODO: Persist states, note that regen could be triggered by old states. - // Should those take a place in the cache? + if (shouldReload) { + // also with shouldReload flag, we "reload" it to the state cache too + this.modules.stateCache.add(state); + } // this avoids keeping our node busy processing blocks await sleep(0); @@ -216,7 +234,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { private findFirstStateBlock(stateRoot: RootHex): ProtoBlock { for (const block of this.modules.forkChoice.forwarditerateAncestorBlocks()) { - if (block !== undefined) { + if (block.stateRoot === stateRoot) { return block; } } @@ -263,7 +281,7 @@ async function processSlotsToNearestCheckpoint( const postSlot = slot; const preEpoch = computeEpochAtSlot(preSlot); let postState = preState; - const {checkpointStateCache, emitter, metrics} = modules; + const {emitter, metrics, checkpointStateCache} = modules; for ( let nextEpochSlot = computeStartSlotAtEpoch(preEpoch + 1); @@ -273,7 +291,10 @@ async function processSlotsToNearestCheckpoint( // processSlots calls .clone() before mutating postState = processSlots(postState, nextEpochSlot, opts, metrics); - // Cache state to preserve epoch transition work + // this is usually added 
when we prepare for next slot or validate gossip block + // then when we process the 1st block of epoch, we don't have to do state transition again + // This adds Previous Root Checkpoint State to the checkpoint state cache + // This may becomes the "official" checkpoint state if the 1st block of epoch is skipped const checkpointState = postState; const cp = getCheckpointFromState(checkpointState); checkpointStateCache.add(cp, checkpointState); diff --git a/packages/beacon-node/src/chain/shufflingCache.ts b/packages/beacon-node/src/chain/shufflingCache.ts new file mode 100644 index 00000000000..ffd528a4327 --- /dev/null +++ b/packages/beacon-node/src/chain/shufflingCache.ts @@ -0,0 +1,61 @@ +import {CachedBeaconStateAllForks, EpochShuffling, getShufflingDecisionBlock} from "@lodestar/state-transition"; +import {Epoch, RootHex} from "@lodestar/types"; +import {Metrics} from "../metrics/metrics.js"; + +/** + * Same value to CheckpointBalancesCache, with the assumption that we don't have to use it for old epochs. In the worse case: + * - when loading state bytes from disk, we need to compute shuffling for all epochs (~1s as of Sep 2023) + * - don't have shuffling to verify attestations, need to do 1 epoch transition to add shuffling to this cache. 
This never happens + * with default chain option of maxSkipSlots = 32 + **/ +const MAX_SHUFFLING_CACHE_SIZE = 4; + +type ShufflingCacheItem = { + decisionBlockHex: RootHex; + shuffling: EpochShuffling; +}; + +/** + * A shuffling cache to help: + * - get committee quickly for attestation verification + * - skip computing shuffling when loading state bytes from disk + */ +export class ShufflingCache { + private readonly items: ShufflingCacheItem[] = []; + + constructor(metrics: Metrics | null = null) { + if (metrics) { + metrics.shufflingCache.size.addCollect(() => metrics.shufflingCache.size.set(this.items.length)); + } + } + + processState(state: CachedBeaconStateAllForks, shufflingEpoch: Epoch): void { + const decisionBlockHex = getShufflingDecisionBlock(state, shufflingEpoch); + const index = this.items.findIndex( + (item) => item.shuffling.epoch === shufflingEpoch && item.decisionBlockHex === decisionBlockHex + ); + if (index === -1) { + if (this.items.length === MAX_SHUFFLING_CACHE_SIZE) { + this.items.shift(); + } + let shuffling: EpochShuffling; + if (shufflingEpoch === state.epochCtx.nextShuffling.epoch) { + shuffling = state.epochCtx.nextShuffling; + } else if (shufflingEpoch === state.epochCtx.currentShuffling.epoch) { + shuffling = state.epochCtx.currentShuffling; + } else if (shufflingEpoch === state.epochCtx.previousShuffling.epoch) { + shuffling = state.epochCtx.previousShuffling; + } else { + throw new Error(`Shuffling not found from state ${state.slot} for epoch ${shufflingEpoch}`); + } + this.items.push({decisionBlockHex, shuffling}); + } + } + + get(shufflingEpoch: Epoch, dependentRootHex: RootHex): EpochShuffling | null { + return ( + this.items.find((item) => item.shuffling.epoch === shufflingEpoch && item.decisionBlockHex === dependentRootHex) + ?.shuffling ?? 
null + ); + } +} diff --git a/packages/beacon-node/src/chain/stateCache/index.ts b/packages/beacon-node/src/chain/stateCache/index.ts index 69fb34a77e4..e8a1f394ee2 100644 --- a/packages/beacon-node/src/chain/stateCache/index.ts +++ b/packages/beacon-node/src/chain/stateCache/index.ts @@ -1,2 +1,3 @@ -export * from "./stateContextCache.js"; -export * from "./stateContextCheckpointsCache.js"; +export * from "./lruBlockStateCache.js"; +export * from "./persistentCheckpointsCache.js"; +export * from "./types.js"; diff --git a/packages/beacon-node/src/chain/stateCache/lruBlockStateCache.ts b/packages/beacon-node/src/chain/stateCache/lruBlockStateCache.ts new file mode 100644 index 00000000000..38810eeb55a --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/lruBlockStateCache.ts @@ -0,0 +1,146 @@ +import {toHexString} from "@chainsafe/ssz"; +import {Epoch, RootHex} from "@lodestar/types"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {routes} from "@lodestar/api"; +import {Metrics} from "../../metrics/index.js"; +import {LinkedList} from "../../util/array.js"; +import {MapTracker} from "./mapMetrics.js"; +import {BlockStateCache} from "./types.js"; + +export type LRUBlockStateCacheOpts = { + maxStates: number; +}; + +/** + * New implementation of BlockStateCache that keeps the most recent n states consistently + * - Prune per add() instead of per checkpoint so it only keeps n historical states consistently + * - This is LRU like cache except that we only track the last added time, not the last used time + * because state could be fetched from multiple places, but we only care about the last added time. 
+ * - No need to set a separate head state, the head state is always the first item in the list + */ +export class LRUBlockStateCache implements BlockStateCache { + /** + * Max number of states allowed in the cache + */ + readonly maxStates: number; + + private readonly cache: MapTracker; + /** Epoch -> Set */ + private readonly epochIndex = new Map>(); + // key order to implement LRU like cache + private readonly keyOrder: LinkedList; + private readonly metrics: Metrics["stateCache"] | null | undefined; + + constructor(opts: LRUBlockStateCacheOpts, {metrics}: {maxStates?: number; metrics?: Metrics | null}) { + this.maxStates = opts.maxStates; + this.cache = new MapTracker(metrics?.stateCache); + if (metrics) { + this.metrics = metrics.stateCache; + metrics.stateCache.size.addCollect(() => metrics.stateCache.size.set(this.cache.size)); + } + this.keyOrder = new LinkedList(); + } + + /** + * This implementation always move head state to the head of the list + * so no need to set a separate head state + * However this is to be consistent with the old StateContextCache + * TODO: remove this method, consumer should go with add() api instead + */ + setHeadState(item: CachedBeaconStateAllForks | null): void { + if (item !== null) { + this.add(item); + } + } + + get(rootHex: RootHex): CachedBeaconStateAllForks | null { + this.metrics?.lookups.inc(); + const item = this.cache.get(rootHex); + if (!item) { + return null; + } + + this.metrics?.hits.inc(); + this.metrics?.stateClonedCount.observe(item.clonedCount); + + return item; + } + + add(item: CachedBeaconStateAllForks): void { + const key = toHexString(item.hashTreeRoot()); + if (this.cache.get(key)) { + this.keyOrder.moveToHead(key); + // same size, no prune + return; + } + this.metrics?.adds.inc(); + this.cache.set(key, item); + const epoch = item.epochCtx.epoch; + const blockRoots = this.epochIndex.get(epoch); + if (blockRoots) { + blockRoots.add(key); + } else { + this.epochIndex.set(epoch, new Set([key])); + } + 
this.keyOrder.unshift(key); + this.prune(); + } + + clear(): void { + this.cache.clear(); + this.epochIndex.clear(); + } + + get size(): number { + return this.cache.size; + } + + /** + * If a recent state is not available, regen from the checkpoint state. + * Given state 0 => 1 => ... => n, if regen adds back state 0 we should not remove it right away. + * The LRU-like cache helps with this. + */ + prune(): void { + while (this.keyOrder.length > this.maxStates) { + const key = this.keyOrder.pop(); + if (!key) { + // should not happen + throw new Error("No key"); + } + const item = this.cache.get(key); + if (item) { + this.epochIndex.get(item.epochCtx.epoch)?.delete(key); + this.cache.delete(key); + } + } + } + + /** + * Prune per finalized epoch. + */ + deleteAllBeforeEpoch(finalizedEpoch: Epoch): void { + for (const epoch of this.epochIndex.keys()) { + if (epoch < finalizedEpoch) { + this.deleteAllEpochItems(epoch); + } + } + } + + /** ONLY FOR DEBUGGING PURPOSES. For lodestar debug API */ + dumpSummary(): routes.lodestar.StateCacheItem[] { + return Array.from(this.cache.entries()).map(([key, state]) => ({ + slot: state.slot, + root: toHexString(state.hashTreeRoot()), + reads: this.cache.readCount.get(key) ?? 0, + lastRead: this.cache.lastRead.get(key) ?? 
0, + checkpointState: false, + })); + } + + private deleteAllEpochItems(epoch: Epoch): void { + for (const rootHex of this.epochIndex.get(epoch) || []) { + this.cache.delete(rootHex); + } + this.epochIndex.delete(epoch); + } +} diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/memoryCheckpointsCache.ts similarity index 86% rename from packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts rename to packages/beacon-node/src/chain/stateCache/memoryCheckpointsCache.ts index 0cb48f0e2de..e9b6d83a202 100644 --- a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/memoryCheckpointsCache.ts @@ -5,17 +5,16 @@ import {MapDef} from "@lodestar/utils"; import {routes} from "@lodestar/api"; import {Metrics} from "../../metrics/index.js"; import {MapTracker} from "./mapMetrics.js"; +import {CheckpointHex, CheckpointStateCache} from "./types.js"; -export type CheckpointHex = {epoch: Epoch; rootHex: RootHex}; const MAX_EPOCHS = 10; /** - * In memory cache of CachedBeaconState - * belonging to checkpoint + * Old implementation of CheckpointStateCache that only store checkpoint states in memory * * Similar API to Repository */ -export class CheckpointStateCache { +export class MemoryCheckpointStateCache implements CheckpointStateCache { private readonly cache: MapTracker; /** Epoch -> Set */ private readonly epochIndex = new MapDef>(() => new Set()); @@ -32,6 +31,23 @@ export class CheckpointStateCache { } } + async getOrReload(cp: CheckpointHex): Promise { + return this.get(cp); + } + + async getStateOrBytes(cp: CheckpointHex): Promise { + return this.get(cp); + } + + async getOrReloadLatest(rootHex: string, maxEpoch: number): Promise { + return this.getLatest(rootHex, maxEpoch); + } + + pruneFromMemory(): Promise { + // do nothing, this method does not support pruning + return Promise.resolve(0); + } + get(cp: 
CheckpointHex): CachedBeaconStateAllForks | null { this.metrics?.lookups.inc(); const cpKey = toCheckpointKey(cp); diff --git a/packages/beacon-node/src/chain/stateCache/persistent/db.ts b/packages/beacon-node/src/chain/stateCache/persistent/db.ts new file mode 100644 index 00000000000..c94aa208ca1 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/persistent/db.ts @@ -0,0 +1,37 @@ +import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {IBeaconDb} from "../../../db/interface.js"; +import {CPStatePersistentApis, PersistentKey} from "./types.js"; + +/** + * Implementation of CPStatePersistentApis using db. + */ +export class DbPersistentApis implements CPStatePersistentApis { + constructor(private readonly db: IBeaconDb) {} + + async write(_: string, state: CachedBeaconStateAllForks): Promise { + const root = state.hashTreeRoot(); + const stateBytes = state.serialize(); + await this.db.checkpointState.putBinary(root, stateBytes); + return toHexString(root); + } + + async remove(persistentKey: PersistentKey): Promise { + await this.db.checkpointState.delete(fromHexString(persistentKey)); + return true; + } + + async read(persistentKey: string): Promise { + return this.db.checkpointState.getBinary(fromHexString(persistentKey)); + } + + /** + * Clean all checkpoint state in db at startup time. 
+ */ + async init(): Promise { + const keyStream = this.db.checkpointState.keysStream(); + for await (const key of keyStream) { + await this.db.checkpointState.delete(key); + } + } +} diff --git a/packages/beacon-node/src/chain/stateCache/persistent/file.ts b/packages/beacon-node/src/chain/stateCache/persistent/file.ts new file mode 100644 index 00000000000..70a9181081e --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/persistent/file.ts @@ -0,0 +1,53 @@ +import fs from "node:fs"; +import path from "node:path"; +import {removeFile, writeIfNotExist, ensureDir, readAllFileNames} from "@lodestar/utils"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {CheckpointKey} from "../types.js"; +import {CPStatePersistentApis, PersistentKey} from "./types.js"; + +/** + * Implementation of CPStatePersistentApis using file system, this is beneficial for debugging. + */ +export class FilePersistentApis implements CPStatePersistentApis { + constructor(private readonly folderPath: string) {} + + /** + * Writing to file name with `${cp.rootHex}_${cp.epoch}` helps debugging. + * This is slow code as it do state serialization which takes 600ms to 900ms on holesky. 
+ */ + async write(checkpointKey: CheckpointKey, state: CachedBeaconStateAllForks): Promise { + const stateBytes = state.serialize(); + const persistentKey = this.toPersistentKey(checkpointKey); + await writeIfNotExist(persistentKey, stateBytes); + return persistentKey; + } + + async remove(persistentKey: PersistentKey): Promise { + return removeFile(persistentKey); + } + + async read(persistentKey: PersistentKey): Promise { + try { + const stateBytes = await fs.promises.readFile(persistentKey); + return stateBytes; + } catch (_) { + return null; + } + } + + async init(): Promise { + try { + await ensureDir(this.folderPath); + const fileNames = await readAllFileNames(this.folderPath); + for (const fileName of fileNames) { + await removeFile(path.join(this.folderPath, fileName)); + } + } catch (_) { + // do nothing + } + } + + private toPersistentKey(checkpointKey: CheckpointKey): PersistentKey { + return path.join(this.folderPath, checkpointKey); + } +} diff --git a/packages/beacon-node/src/chain/stateCache/persistent/types.ts b/packages/beacon-node/src/chain/stateCache/persistent/types.ts new file mode 100644 index 00000000000..e85fb105194 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/persistent/types.ts @@ -0,0 +1,13 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {CheckpointKey} from "../types.js"; + +// With fs implementation, persistentKey is ${CHECKPOINT_STATES_FOLDER/rootHex_epoch} +export type PersistentKey = string; + +// Make this generic to support testing +export interface CPStatePersistentApis { + write: (cpKey: CheckpointKey, state: CachedBeaconStateAllForks) => Promise; + remove: (persistentKey: PersistentKey) => Promise; + read: (persistentKey: PersistentKey) => Promise; + init: () => Promise; +} diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts new file mode 100644 index 
00000000000..cb679e3da5f --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -0,0 +1,534 @@ +import {toHexString} from "@chainsafe/ssz"; +import {phase0, Epoch, RootHex} from "@lodestar/types"; +import {CachedBeaconStateAllForks, computeStartSlotAtEpoch, getBlockRootAtSlot} from "@lodestar/state-transition"; +import {Logger, MapDef} from "@lodestar/utils"; +import {routes} from "@lodestar/api"; +import {loadCachedBeaconState} from "@lodestar/state-transition"; +import {Metrics} from "../../metrics/index.js"; +import {IClock} from "../../util/clock.js"; +import {ShufflingCache} from "../shufflingCache.js"; +import {MapTracker} from "./mapMetrics.js"; +import { + CacheType, + CheckpointHex, + PersistentCheckpointStateCacheModules, + PersistentCheckpointStateCacheOpts, + GetHeadStateFn, + RemovePersistedStateReason, + CheckpointStateCache, + CheckpointKey, +} from "./types.js"; +import {CPStatePersistentApis, PersistentKey} from "./persistent/types.js"; + +/** + * An implementation of CheckpointStateCache that keep up to n epoch checkpoint states in memory and persist the rest to disk + * - If it's more than `maxEpochsInMemory` epochs old, it will be persisted to disk following LRU cache + * - Once a chain gets finalized we'll prune all states from memory and disk for epochs < finalizedEpoch + * - In get*() apis if shouldReload is true, it will reload from disk. The reload() api is expensive (as with Holesky, it takes ~1.5s to load and could be + * up 2s-3s in total for the hashTreeRoot() ) and should only be called in some important flows: + * - Get state for block processing + * - updateHeadState + * - as with any cache, the state could be evicted from memory at any time, so we should always check if the state is in memory or not + * - For each epoch, we only persist exactly 1 (official) checkpoint state and prune the other one because it's enough for the regen. 
The persisted (official) + * checkpoint state could be finalized and used later in archive task. The "official" checkpoint state is defined at: https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.2/specs/phase0/beacon-chain.md + * + * - If there is Current Root Checkpoint State, we persist that state to disk and delete the Previous Root Checkpoint State + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + * + * - If there is no Current Root Checkpoint State, we persist the Previous Root Checkpoint State to disk + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + * + * The below diagram shows Previous Root Checkpoint State is persisted for epoch (n-2) and Current Root Checkpoint State is persisted for epoch (n-1) + * while at epoch (n) and (n+1) we have both of them in memory + * + * ╔════════════════════════════════════╗═══════════════╗ + * ║ persisted to db or fs ║ in memory ║ + * ║ reload if needed ║ ║ + * ║ -----------------------------------║---------------║ + * ║ epoch: (n-2) (n-1) ║ n (n+1) ║ + * ║ |-------|-------|----║--|-------|----║ + * ║ ^ ^ ║ ^ ^ ║ + * ║ ║ ^ ^ ║ + * ╚════════════════════════════════════╝═══════════════╝ + */ +export class PersistentCheckpointStateCache implements CheckpointStateCache { + private readonly cache: MapTracker; + // maintain order of epoch to decide which epoch to prune from memory + private readonly inMemoryEpochs: Set; + /** Epoch -> Set */ + private readonly epochIndex = new MapDef>(() => new Set()); + private readonly metrics: Metrics["cpStateCache"] | null | undefined; + private readonly logger: Logger; + private readonly clock: IClock | null | undefined; + private preComputedCheckpoint: string | null = null; + private preComputedCheckpointHits: number | null = null; + private readonly maxEpochsInMemory: number; + private readonly persistentApis: CPStatePersistentApis; + private readonly shufflingCache: 
ShufflingCache; + private readonly getHeadState?: GetHeadStateFn; + + constructor( + {metrics, logger, clock, shufflingCache, persistentApis, getHeadState}: PersistentCheckpointStateCacheModules, + opts: PersistentCheckpointStateCacheOpts + ) { + this.cache = new MapTracker(metrics?.cpStateCache); + if (metrics) { + this.metrics = metrics.cpStateCache; + metrics.cpStateCache.size.addCollect(() => { + let persistCount = 0; + let stateCount = 0; + const memoryEpochs = new Set(); + const persistentEpochs = new Set(); + for (const [key, stateOrPersistentKey] of this.cache.entries()) { + const {epoch} = fromCheckpointKey(key); + if (isPersistentKey(stateOrPersistentKey)) { + persistCount++; + memoryEpochs.add(epoch); + } else { + stateCount++; + persistentEpochs.add(epoch); + } + } + metrics.cpStateCache.size.set({type: CacheType.persistence}, persistCount); + metrics.cpStateCache.size.set({type: CacheType.state}, stateCount); + metrics.cpStateCache.epochSize.set({type: CacheType.persistence}, persistentEpochs.size); + metrics.cpStateCache.epochSize.set({type: CacheType.state}, memoryEpochs.size); + }); + } + this.logger = logger; + this.clock = clock; + if (opts.maxEpochsInMemory < 0) { + throw new Error("maxEpochsInMemory must be >= 0"); + } + this.maxEpochsInMemory = opts.maxEpochsInMemory; + // Specify different persistentApis for testing + this.persistentApis = persistentApis; + this.shufflingCache = shufflingCache; + this.getHeadState = getHeadState; + this.inMemoryEpochs = new Set(); + } + + async init(): Promise { + return this.persistentApis.init(); + } + + /** + * Get a state from cache, it will reload from disk. 
+ * This is expensive api, should only be called in some important flows: + * - Validate a gossip block + * - Get block for processing + * - Regen head state + */ + async getOrReload(cp: CheckpointHex): Promise { + const cpKey = toCheckpointKey(cp); + const inMemoryState = this.get(cpKey); + if (inMemoryState) { + return inMemoryState; + } + + const persistentKey = this.cache.get(cpKey); + if (persistentKey === undefined) { + return null; + } + + if (!isPersistentKey(persistentKey)) { + // should not happen, in-memory state is handled above + throw new Error("Expected persistent key"); + } + + // reload from disk or db based on closest checkpoint + this.logger.verbose("Reload: read state", {persistentKey}); + const newStateBytes = await this.persistentApis.read(persistentKey); + if (newStateBytes === null) { + this.logger.verbose("Reload: read state failed", {persistentKey}); + return null; + } + this.logger.verbose("Reload: read state successfully", {persistentKey}); + this.metrics?.stateRemoveCount.inc({reason: RemovePersistedStateReason.reload}); + this.metrics?.stateReloadSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0); + const closestState = findClosestCheckpointState(cp, this.cache) ?? 
this.getHeadState?.(); + if (closestState == null) { + throw new Error("No closest state found for cp " + toCheckpointKey(cp)); + } + this.metrics?.stateReloadEpochDiff.observe(Math.abs(closestState.epochCtx.epoch - cp.epoch)); + this.logger.verbose("Reload: found closest state", {persistentKey, seedSlot: closestState.slot}); + const timer = this.metrics?.stateReloadDuration.startTimer(); + + try { + const newCachedState = loadCachedBeaconState(closestState, newStateBytes, { + shufflingGetter: this.shufflingCache.get.bind(this.shufflingCache), + }); + timer?.(); + this.logger.verbose("Reload state successfully", { + persistentKey, + stateSlot: newCachedState.slot, + seedSlot: closestState.slot, + }); + // only remove persisted state once we reload successfully + void this.persistentApis.remove(persistentKey); + this.cache.set(cpKey, newCachedState); + this.inMemoryEpochs.add(cp.epoch); + // don't prune from memory here, call it at the last 1/3 of slot 0 of an epoch + return newCachedState; + } catch (e) { + this.logger.debug("Error reloading state from disk", {persistentKey}, e as Error); + return null; + } + return null; + } + + /** + * Return either state or state bytes without reloading from disk. + */ + async getStateOrBytes(cp: CheckpointHex): Promise { + const cpKey = toCheckpointKey(cp); + const inMemoryState = this.get(cpKey); + if (inMemoryState) { + return inMemoryState; + } + + const persistentKey = this.cache.get(cpKey); + if (persistentKey === undefined) { + return null; + } + + if (!isPersistentKey(persistentKey)) { + // should not happen, in-memory state is handled above + throw new Error("Expected persistent key"); + } + + return this.persistentApis.read(persistentKey); + } + + /** + * Similar to get() api without reloading from disk + */ + get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null { + this.metrics?.lookups.inc(); + const cpKey = typeof cpOrKey === "string" ? 
cpOrKey : toCheckpointKey(cpOrKey); + const stateOrPersistentKey = this.cache.get(cpKey); + + if (stateOrPersistentKey === undefined) { + return null; + } + + this.metrics?.hits.inc(); + + if (cpKey === this.preComputedCheckpoint) { + this.preComputedCheckpointHits = (this.preComputedCheckpointHits ?? 0) + 1; + } + + if (!isPersistentKey(stateOrPersistentKey)) { + this.metrics?.stateClonedCount.observe(stateOrPersistentKey.clonedCount); + return stateOrPersistentKey; + } + + return null; + } + + /** + * Add a state of a checkpoint to this cache, prune from memory if necessary. + */ + add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void { + const cpHex = toCheckpointHex(cp); + const key = toCheckpointKey(cpHex); + const stateOrPersistentKey = this.cache.get(key); + this.inMemoryEpochs.add(cp.epoch); + if (stateOrPersistentKey !== undefined) { + if (isPersistentKey(stateOrPersistentKey)) { + // was persisted to disk, set back to memory + this.cache.set(key, state); + void this.persistentApis.remove(stateOrPersistentKey); + this.metrics?.stateRemoveCount.inc({reason: RemovePersistedStateReason.stateUpdate}); + } + return; + } + this.metrics?.adds.inc(); + this.cache.set(key, state); + this.epochIndex.getOrDefault(cp.epoch).add(cpHex.rootHex); + // don't prune from memory here, call it at the last 1/3 of slot 0 of an epoch + } + + /** + * Searches in-memory state for the latest cached state with a `root` without reload, starting with `epoch` and descending + */ + getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null { + // sort epochs in descending order, only consider epochs lte `epoch` + const epochs = Array.from(this.epochIndex.keys()) + .sort((a, b) => b - a) + .filter((e) => e <= maxEpoch); + for (const epoch of epochs) { + if (this.epochIndex.get(epoch)?.has(rootHex)) { + const inMemoryState = this.get({rootHex, epoch}); + if (inMemoryState) { + return inMemoryState; + } + } + } + return null; + } + + /** + * Searches state 
for the latest cached state with a `root`, reload if needed, starting with `epoch` and descending + * This is expensive api, should only be called in some important flows: + * - Validate a gossip block + * - Get block for processing + * - Regen head state + */ + async getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise { + // sort epochs in descending order, only consider epochs lte `epoch` + const epochs = Array.from(this.epochIndex.keys()) + .sort((a, b) => b - a) + .filter((e) => e <= maxEpoch); + for (const epoch of epochs) { + if (this.epochIndex.get(epoch)?.has(rootHex)) { + try { + const state = await this.getOrReload({rootHex, epoch}); + if (state) { + return state; + } + } catch (e) { + this.logger.debug("Error get or reload state", {epoch, rootHex}, e as Error); + } + } + } + return null; + } + + /** + * Update the precomputed checkpoint and return the number of his for the + * previous one (if any). + */ + updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null { + const previousHits = this.preComputedCheckpointHits; + this.preComputedCheckpoint = toCheckpointKey({rootHex, epoch}); + this.preComputedCheckpointHits = 0; + return previousHits; + } + + /** + * This is just to conform to the old implementation + */ + prune(): void { + // do nothing + } + + pruneFinalized(finalizedEpoch: Epoch): void { + for (const epoch of this.epochIndex.keys()) { + if (epoch < finalizedEpoch) { + this.deleteAllEpochItems(epoch).catch((e) => + this.logger.debug("Error delete all epoch items", {epoch, finalizedEpoch}, e as Error) + ); + } + } + } + + /** + * For testing only + */ + delete(cp: phase0.Checkpoint): void { + const key = toCheckpointKey(toCheckpointHex(cp)); + this.cache.delete(key); + // check if there's any state left in memory for this epoch + let foundState = false; + for (const rootHex of this.epochIndex.get(cp.epoch)?.values() || []) { + const cpKey = toCheckpointKey({epoch: cp.epoch, rootHex}); + const stateOrPersistentKey = 
this.cache.get(cpKey); + if (stateOrPersistentKey !== undefined && !isPersistentKey(stateOrPersistentKey)) { + // this is a state + foundState = true; + break; + } + } + if (!foundState) { + this.inMemoryEpochs.delete(cp.epoch); + } + const epochKey = toHexString(cp.root); + const value = this.epochIndex.get(cp.epoch); + if (value) { + value.delete(epochKey); + if (value.size === 0) { + this.epochIndex.delete(cp.epoch); + } + } + } + + /** + * Delete all items of an epoch from disk and memory + */ + async deleteAllEpochItems(epoch: Epoch): Promise { + for (const rootHex of this.epochIndex.get(epoch) || []) { + const key = toCheckpointKey({rootHex, epoch}); + const stateOrPersistentKey = this.cache.get(key); + if (stateOrPersistentKey !== undefined && isPersistentKey(stateOrPersistentKey)) { + await this.persistentApis.remove(stateOrPersistentKey); + this.metrics?.stateRemoveCount.inc({reason: RemovePersistedStateReason.pruneFinalized}); + } + this.cache.delete(key); + } + this.inMemoryEpochs.delete(epoch); + this.epochIndex.delete(epoch); + } + + /** + * This is slow code because it involves serializing the whole state to disk which takes 600ms to 900ms on Holesky as of Sep 2023 + * The add() is called after we process 1st block of an epoch, we don't want to pruneFromMemory at that time since it's the hot time + * Call this code at the last 1/3 slot of slot 0 of an epoch + */ + async pruneFromMemory(): Promise { + let count = 0; + while (this.inMemoryEpochs.size > this.maxEpochsInMemory) { + let firstEpoch: Epoch | undefined; + for (const epoch of this.inMemoryEpochs) { + firstEpoch = epoch; + break; + } + if (firstEpoch === undefined) { + // should not happen + throw new Error("No epoch in memory"); + } + // first loop to check if the 1st slot of epoch is a skipped slot or not + let firstSlotBlockRoot: string | undefined; + for (const rootHex of this.epochIndex.get(firstEpoch) ?? 
[]) { + const cpKey = toCheckpointKey({epoch: firstEpoch, rootHex}); + const stateOrPersistentKey = this.cache.get(cpKey); + if (stateOrPersistentKey !== undefined && !isPersistentKey(stateOrPersistentKey)) { + // this is a state + if ( + rootHex !== toHexString(getBlockRootAtSlot(stateOrPersistentKey, computeStartSlotAtEpoch(firstEpoch) - 1)) + ) { + firstSlotBlockRoot = rootHex; + break; + } + } + } + + // if found firstSlotBlockRoot it means it's Current Root Checkpoint State and we should only persist that checkpoint as it's the state + // that will be justified/finalized later, delete the Previous Root Checkpoint State + // if not found firstSlotBlockRoot, first slot of state is skipped, we should persist the Previous Root Checkpoint State, where the root + // is the last block slot root of pervious epoch. In this case Previous Root Checkpoint State would become the justified/finalized state. + for (const rootHex of this.epochIndex.get(firstEpoch) ?? []) { + let toPersist = false; + let toDelete = false; + if (firstSlotBlockRoot === undefined) { + toPersist = true; + } else { + if (rootHex === firstSlotBlockRoot) { + toPersist = true; + } else { + toDelete = true; + } + } + const cpKey = toCheckpointKey({epoch: firstEpoch, rootHex}); + const stateOrPersistentKey = this.cache.get(cpKey); + if (stateOrPersistentKey !== undefined && !isPersistentKey(stateOrPersistentKey)) { + if (toPersist) { + // do not update epochIndex + this.metrics?.statePersistSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 
0); + const timer = this.metrics?.statePersistDuration.startTimer(); + const persistentKey = await this.persistentApis.write(cpKey, stateOrPersistentKey); + timer?.(); + this.cache.set(cpKey, persistentKey); + count++; + this.logger.verbose("Prune checkpoint state from memory and persist to disk", { + persistentKey, + stateSlot: stateOrPersistentKey.slot, + rootHex, + }); + } else if (toDelete) { + this.cache.delete(cpKey); + this.metrics?.statePruneFromMemoryCount.inc(); + this.logger.verbose("Prune checkpoint state from memory", {stateSlot: stateOrPersistentKey.slot, rootHex}); + } + } + } + + this.inMemoryEpochs.delete(firstEpoch); + } + + return count; + } + + clear(): void { + this.cache.clear(); + this.epochIndex.clear(); + } + + /** ONLY FOR DEBUGGING PURPOSES. For lodestar debug API */ + dumpSummary(): routes.lodestar.StateCacheItem[] { + return Array.from(this.cache.keys()).map((key) => { + const cp = fromCheckpointKey(key); + const stateOrPersistentKey = this.cache.get(key); + return { + slot: computeStartSlotAtEpoch(cp.epoch), + root: cp.rootHex, + reads: this.cache.readCount.get(key) ?? 0, + lastRead: this.cache.lastRead.get(key) ?? 0, + checkpointState: true, + persistentKey: + stateOrPersistentKey !== undefined && isPersistentKey(stateOrPersistentKey) + ? stateOrPersistentKey + : undefined, + }; + }); + } + + /** ONLY FOR DEBUGGING PURPOSES. For spec tests on error */ + dumpCheckpointKeys(): string[] { + return Array.from(this.cache.keys()); + } +} + +/** + * Find closest state from cache to provided checkpoint. + * Note that in 0-historical state configuration, this could return null and we should get head state in that case. 
+ */ +export function findClosestCheckpointState( + cp: CheckpointHex, + cache: Map +): CachedBeaconStateAllForks | null { + let smallestEpochDiff = Infinity; + let closestState: CachedBeaconStateAllForks | null = null; + for (const [key, value] of cache.entries()) { + // ignore entries with PersistentKey + if (isPersistentKey(value)) { + continue; + } + const epochDiff = Math.abs(cp.epoch - fromCheckpointKey(key).epoch); + if (epochDiff < smallestEpochDiff) { + smallestEpochDiff = epochDiff; + closestState = value; + } + } + + return closestState; +} + +export function toCheckpointHex(checkpoint: phase0.Checkpoint): CheckpointHex { + return { + epoch: checkpoint.epoch, + rootHex: toHexString(checkpoint.root), + }; +} + +export function toCheckpointKey(cp: CheckpointHex): CheckpointKey { + return `${cp.rootHex}_${cp.epoch}`; +} + +export function fromCheckpointKey(key: CheckpointKey): CheckpointHex { + const [rootHex, epoch] = key.split("_"); + return { + rootHex, + epoch: Number(epoch), + }; +} + +function isPersistentKey( + stateOrPersistentKey: CachedBeaconStateAllForks | PersistentKey +): stateOrPersistentKey is PersistentKey { + return (stateOrPersistentKey as CachedBeaconStateAllForks).epochCtx === undefined; +} diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts index 44523abf799..3a04c4f4a25 100644 --- a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts +++ b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts @@ -4,15 +4,16 @@ import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; import {Metrics} from "../../metrics/index.js"; import {MapTracker} from "./mapMetrics.js"; +import {BlockStateCache} from "./types.js"; const MAX_STATES = 3 * 32; /** - * In memory cache of CachedBeaconState - * - * Similar API to Repository + * Old implementation of StateCache + * - Prune per checkpoint so number 
of states ranges from 96 to 128 + * - Keep a separate head state to make sure it is always available */ -export class StateContextCache { +export class StateContextCache implements BlockStateCache { /** * Max number of states allowed in the cache */ diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts new file mode 100644 index 00000000000..5b4d4586279 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -0,0 +1,89 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {Epoch, RootHex, phase0} from "@lodestar/types"; +import {Logger} from "@lodestar/utils"; +import {routes} from "@lodestar/api"; +import {Metrics} from "../../metrics/index.js"; +import {IClock} from "../../util/clock.js"; +import {ShufflingCache} from "../shufflingCache.js"; +import {CPStatePersistentApis} from "./persistent/types.js"; + +export type CheckpointHex = {epoch: Epoch; rootHex: RootHex}; + +/** + * Store up to n recent block states. 
+ */ +export interface BlockStateCache { + get(rootHex: RootHex): CachedBeaconStateAllForks | null; + add(item: CachedBeaconStateAllForks): void; + setHeadState(item: CachedBeaconStateAllForks | null): void; + clear(): void; + size: number; + prune(headStateRootHex: RootHex): void; + deleteAllBeforeEpoch(finalizedEpoch: Epoch): void; + dumpSummary(): routes.lodestar.StateCacheItem[]; +} + +/** + * Store checkpoint states to preserve epoch transition, this helps lodestar run exactly 1 epoch transition per epoch + * There are 2 types of checkpoint states: + * + * - Previous Root Checkpoint State where root is from previous epoch, this is added when we prepare for next slot, + * or to validate gossip block + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + * + * - Current Root Checkpoint State: this is added when we process block slot 0 of epoch n, note that this block could + * be skipped so we don't always have this checkpoint state + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + */ +export interface CheckpointStateCache { + init?: () => Promise; + getOrReload(cp: CheckpointHex): Promise; + getStateOrBytes(cp: CheckpointHex): Promise; + get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null; + add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void; + getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null; + getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise; + updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; + prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void; + pruneFinalized(finalizedEpoch: Epoch): void; + delete(cp: phase0.Checkpoint): void; + pruneFromMemory(): Promise; + clear(): void; + dumpSummary(): routes.lodestar.StateCacheItem[]; +} + +export const CHECKPOINT_STATES_FOLDER = "./unfinalized_checkpoint_states"; + +export type CheckpointKey = string; + 
+export enum CacheType { + state = "state", + persistence = "persistence", +} + +// Reason to remove a checkpoint state from file/db +export enum RemovePersistedStateReason { + pruneFinalized = "prune_finalized", + reload = "reload", + stateUpdate = "state_update", +} + +export type GetHeadStateFn = () => CachedBeaconStateAllForks; + +export type PersistentCheckpointStateCacheOpts = { + // Keep max n states in memory, persist the rest to disk + maxEpochsInMemory: number; +}; + +export type PersistentCheckpointStateCacheModules = { + metrics?: Metrics | null; + logger: Logger; + clock?: IClock | null; + shufflingCache: ShufflingCache; + persistentApis: CPStatePersistentApis; + getHeadState?: GetHeadStateFn; +}; diff --git a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts index 0cd96a8278e..9b3f79f594e 100644 --- a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts @@ -4,8 +4,6 @@ import {phase0, RootHex, ssz, ValidatorIndex} from "@lodestar/types"; import { computeEpochAtSlot, isAggregatorFromCommitteeLength, - getIndexedAttestationSignatureSet, - ISignatureSet, createAggregateSignatureSetFromComponents, } from "@lodestar/state-transition"; import {IBeaconChain} from ".."; @@ -14,8 +12,9 @@ import {RegenCaller} from "../regen/index.js"; import {getAttDataBase64FromSignedAggregateAndProofSerialized} from "../../util/sszBytes.js"; import {getSelectionProofSignatureSet, getAggregateAndProofSignatureSet} from "./signatureSets/index.js"; import { + getAttestationDataSigningRoot, getCommitteeIndices, - getStateForAttestationVerification, + getShufflingForAttestationVerification, verifyHeadBlockAndTargetRoot, verifyPropagationSlotRange, } from "./attestation.js"; @@ -142,17 +141,24 @@ async function validateAggregateAndProof( // -- i.e. 
get_ancestor(store, aggregate.data.beacon_block_root, compute_start_slot_at_epoch(store.finalized_checkpoint.epoch)) == store.finalized_checkpoint.root // > Altready check in `chain.forkChoice.hasBlock(attestation.data.beaconBlockRoot)` - const attHeadState = await getStateForAttestationVerification( + const shuffling = await getShufflingForAttestationVerification( chain, - attSlot, attEpoch, attHeadBlock, - RegenCaller.validateGossipAggregateAndProof + RegenCaller.validateGossipAttestation ); + if (shuffling === null) { + throw new AttestationError(GossipAction.IGNORE, { + code: AttestationErrorCode.NO_COMMITTEE_FOR_SLOT_AND_INDEX, + index: attIndex, + slot: attSlot, + }); + } + const committeeIndices: number[] = cachedAttData ? cachedAttData.committeeIndices - : getCommitteeIndices(attHeadState, attSlot, attIndex); + : getCommitteeIndices(shuffling, attSlot, attIndex); const attestingIndices = aggregate.aggregationBits.intersectValues(committeeIndices); const indexedAttestation: phase0.IndexedAttestation = { @@ -185,21 +191,16 @@ async function validateAggregateAndProof( // by the validator with index aggregate_and_proof.aggregator_index. // [REJECT] The aggregator signature, signed_aggregate_and_proof.signature, is valid. // [REJECT] The signature of aggregate is valid. - const aggregator = attHeadState.epochCtx.index2pubkey[aggregateAndProof.aggregatorIndex]; - let indexedAttestationSignatureSet: ISignatureSet; - if (cachedAttData) { - const {signingRoot} = cachedAttData; - indexedAttestationSignatureSet = createAggregateSignatureSetFromComponents( - indexedAttestation.attestingIndices.map((i) => chain.index2pubkey[i]), - signingRoot, - indexedAttestation.signature - ); - } else { - indexedAttestationSignatureSet = getIndexedAttestationSignatureSet(attHeadState, indexedAttestation); - } + const aggregator = chain.index2pubkey[aggregateAndProof.aggregatorIndex]; + const signingRoot = cachedAttData ? 
cachedAttData.signingRoot : getAttestationDataSigningRoot(chain.config, attData); + const indexedAttestationSignatureSet = createAggregateSignatureSetFromComponents( + indexedAttestation.attestingIndices.map((i) => chain.index2pubkey[i]), + signingRoot, + indexedAttestation.signature + ); const signatureSets = [ - getSelectionProofSignatureSet(attHeadState, attSlot, aggregator, signedAggregateAndProof), - getAggregateAndProofSignatureSet(attHeadState, attEpoch, aggregator, signedAggregateAndProof), + getSelectionProofSignatureSet(chain.config, attSlot, aggregator, signedAggregateAndProof), + getAggregateAndProofSignatureSet(chain.config, attEpoch, aggregator, signedAggregateAndProof), indexedAttestationSignatureSet, ]; // no need to write to SeenAttestationDatas diff --git a/packages/beacon-node/src/chain/validation/attestation.ts b/packages/beacon-node/src/chain/validation/attestation.ts index 0b642101f01..5db87fa9f5e 100644 --- a/packages/beacon-node/src/chain/validation/attestation.ts +++ b/packages/beacon-node/src/chain/validation/attestation.ts @@ -1,16 +1,19 @@ import {toHexString} from "@chainsafe/ssz"; import {phase0, Epoch, Root, Slot, RootHex, ssz} from "@lodestar/types"; -import {ProtoBlock} from "@lodestar/fork-choice"; -import {ATTESTATION_SUBNET_COUNT, SLOTS_PER_EPOCH, ForkName, ForkSeq} from "@lodestar/params"; +import {EpochDifference, ProtoBlock} from "@lodestar/fork-choice"; +import {ATTESTATION_SUBNET_COUNT, SLOTS_PER_EPOCH, ForkName, ForkSeq, DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import { computeEpochAtSlot, CachedBeaconStateAllForks, - getAttestationDataSigningRoot, createSingleSignatureSetFromComponents, SingleSignatureSet, EpochCacheError, EpochCacheErrorCode, + EpochShuffling, + computeStartSlotAtEpoch, + computeSigningRoot, } from "@lodestar/state-transition"; +import {BeaconConfig} from "@lodestar/config"; import {AttestationError, AttestationErrorCode, GossipAction} from "../errors/index.js"; import 
{MAXIMUM_GOSSIP_CLOCK_DISPARITY_SEC} from "../../constants/index.js"; import {RegenCaller} from "../regen/index.js"; @@ -56,12 +59,6 @@ export type Step0Result = AttestationValidationResult & { validatorIndex: number; }; -/** - * The beacon chain shufflings are designed to provide 1 epoch lookahead - * At each state, we have previous shuffling, current shuffling and next shuffling - */ -const SHUFFLING_LOOK_AHEAD_EPOCHS = 1; - /** * Validate a single gossip attestation, do not prioritize bls signature set */ @@ -359,19 +356,26 @@ async function validateGossipAttestationNoSignatureCheck( // --i.e. get_ancestor(store, attestation.data.beacon_block_root, compute_start_slot_at_epoch(attestation.data.target.epoch)) == attestation.data.target.root // > Altready check in `verifyHeadBlockAndTargetRoot()` - const attHeadState = await getStateForAttestationVerification( + const shuffling = await getShufflingForAttestationVerification( chain, - attSlot, attEpoch, attHeadBlock, RegenCaller.validateGossipAttestation ); + if (shuffling === null) { + throw new AttestationError(GossipAction.IGNORE, { + code: AttestationErrorCode.NO_COMMITTEE_FOR_SLOT_AND_INDEX, + index: attIndex, + slot: attSlot, + }); + } + // [REJECT] The committee index is within the expected range // -- i.e. data.index < get_committee_count_per_slot(state, data.target.epoch) - committeeIndices = getCommitteeIndices(attHeadState, attSlot, attIndex); - getSigningRoot = () => getAttestationDataSigningRoot(attHeadState, attData); - expectedSubnet = attHeadState.epochCtx.computeSubnetForSlot(attSlot, attIndex); + committeeIndices = getCommitteeIndices(shuffling, attSlot, attIndex); + getSigningRoot = () => getAttestationDataSigningRoot(chain.config, attData); + expectedSubnet = computeSubnetForSlot(shuffling, attSlot, attIndex); } const validatorIndex = committeeIndices[bitIndex]; @@ -568,41 +572,116 @@ export function verifyHeadBlockAndTargetRoot( } /** - * Get a state for attestation verification. 
- * Use head state if: - * - attestation slot is in the same fork as head block - * - head state includes committees of target epoch + * Get a shuffling for attestation verification from the ShufflingCache. + * - if blockEpoch is attEpoch, use current shuffling of head state + * - if blockEpoch is attEpoch - 1, use next shuffling of head state + * - if blockEpoch is less than attEpoch - 1, dial head state to attEpoch - 1, and add to ShufflingCache + * + * This implementation does not require to dial head state to attSlot at fork boundary because we always get domain of attSlot + * in consumer context. * - * Otherwise, regenerate state from head state dialing to target epoch + * This is similar to the old getStateForAttestationVerification + * see https://github.com/ChainSafe/lodestar/blob/v1.11.3/packages/beacon-node/src/chain/validation/attestation.ts#L566 */ -export async function getStateForAttestationVerification( +export async function getShufflingForAttestationVerification( chain: IBeaconChain, - attSlot: Slot, attEpoch: Epoch, attHeadBlock: ProtoBlock, regenCaller: RegenCaller -): Promise { - const isSameFork = chain.config.getForkSeq(attSlot) === chain.config.getForkSeq(attHeadBlock.slot); - // thanks for 1 epoch look ahead of shuffling, a state at epoch n can get committee for epoch n+1 - const headStateHasTargetEpochCommmittee = - attEpoch - computeEpochAtSlot(attHeadBlock.slot) <= SHUFFLING_LOOK_AHEAD_EPOCHS; +): Promise { + const blockEpoch = computeEpochAtSlot(attHeadBlock.slot); + let shufflingDependentRoot: RootHex; + if (blockEpoch === attEpoch) { + // current shuffling, this is equivalent to `headState.currentShuffling` + // given blockEpoch = attEpoch = n + // epoch: (n-2) (n-1) n (n+1) + // |-------|-------|-------|-------| + // attHeadBlock ------------------------^ + // shufflingDependentRoot ------^ + shufflingDependentRoot = chain.forkChoice.getDependentRoot(attHeadBlock, EpochDifference.previous); + } else if (blockEpoch === attEpoch - 1) { 
+ // next shuffling, this is equivalent to `headState.nextShuffling` + // given blockEpoch = n-1, attEpoch = n + // epoch: (n-2) (n-1) n (n+1) + // |-------|-------|-------|-------| + // attHeadBlock -------------------^ + // shufflingDependentRoot ------^ + shufflingDependentRoot = chain.forkChoice.getDependentRoot(attHeadBlock, EpochDifference.current); + } else if (blockEpoch < attEpoch - 1) { + // this never happens with default chain option of maxSkipSlots = 32, however we still need to handle it + // check the verifyHeadBlockAndTargetRoot() function above + // given blockEpoch = n-2, attEpoch = n + // epoch: (n-2) (n-1) n (n+1) + // |-------|-------|-------|-------| + // attHeadBlock -----------^ + // shufflingDependentRoot -----^ + shufflingDependentRoot = attHeadBlock.blockRoot; + // use lodestar_gossip_attestation_head_slot_to_attestation_slot metric to track this case + } else { + // blockEpoch > attEpoch + // should not happen, handled in verifyAttestationTargetRoot + throw Error(`attestation epoch ${attEpoch} is before head block epoch ${blockEpoch}`); + } + + let shuffling = chain.shufflingCache.get(attEpoch, shufflingDependentRoot); + if (shuffling) { + // most of the time, we should get the shuffling from cache + chain.metrics?.gossipAttestation.shufflingHit.inc({caller: regenCaller}); + return shuffling; + } + chain.metrics?.gossipAttestation.shufflingMiss.inc({caller: regenCaller}); + + let state: CachedBeaconStateAllForks; try { - if (isSameFork && headStateHasTargetEpochCommmittee) { - // most of the time it should just use head state + if (blockEpoch < attEpoch - 1) { + // thanks to one epoch look ahead, we don't need to dial up to attEpoch + const targetSlot = computeStartSlotAtEpoch(attEpoch - 1); + chain.metrics?.gossipAttestation.useHeadBlockStateDialedToTargetEpoch.inc({caller: regenCaller}); + state = await chain.regen.getBlockSlotState( + attHeadBlock.blockRoot, + targetSlot, + {dontTransferCache: true}, + regenCaller + ); + } else if 
(blockEpoch > attEpoch) { + // should not happen, handled above + throw Error(`Block epoch ${blockEpoch} is after attestation epoch ${attEpoch}`); + } else { + // should use either current or next shuffling of head state + // it's not likely to hit this since these shufflings are cached already + // so handle just in case chain.metrics?.gossipAttestation.useHeadBlockState.inc({caller: regenCaller}); - return await chain.regen.getState(attHeadBlock.stateRoot, regenCaller); + state = await chain.regen.getState(attHeadBlock.stateRoot, regenCaller); } - - // at fork boundary we should dial head state to target epoch - // see https://github.com/ChainSafe/lodestar/pull/4849 - chain.metrics?.gossipAttestation.useHeadBlockStateDialedToTargetEpoch.inc({caller: regenCaller}); - return await chain.regen.getBlockSlotState(attHeadBlock.blockRoot, attSlot, {dontTransferCache: true}, regenCaller); } catch (e) { throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.MISSING_STATE_TO_VERIFY_ATTESTATION, error: e as Error, }); } + + // add to cache + chain.shufflingCache.processState(state, attEpoch); + shuffling = chain.shufflingCache.get(attEpoch, shufflingDependentRoot); + if (shuffling) { + chain.metrics?.gossipAttestation.shufflingRegenHit.inc({caller: regenCaller}); + return shuffling; + } else { + chain.metrics?.gossipAttestation.shufflingRegenMiss.inc({caller: regenCaller}); + return null; + } +} + +/** + * Different version of getAttestationDataSigningRoot in state-transition which doesn't require a state. 
+ */ +export function getAttestationDataSigningRoot(config: BeaconConfig, data: phase0.AttestationData): Uint8Array { + const slot = computeStartSlotAtEpoch(data.target.epoch); + // previously, we call `domain = config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot)` + // at fork boundary, it's required to dial to target epoch https://github.com/ChainSafe/lodestar/blob/v1.11.3/packages/beacon-node/src/chain/validation/attestation.ts#L573 + // instead of that, just use the slot in the attestation data + const domain = config.getDomain(slot, DOMAIN_BEACON_ATTESTER); + return computeSigningRoot(ssz.phase0.AttestationData, data, domain); } /** @@ -680,21 +759,10 @@ function verifyAttestationTargetRoot(headBlock: ProtoBlock, targetRoot: Root, at } export function getCommitteeIndices( - attestationTargetState: CachedBeaconStateAllForks, + shuffling: EpochShuffling, attestationSlot: Slot, attestationIndex: number ): number[] { - const shuffling = attestationTargetState.epochCtx.getShufflingAtSlotOrNull(attestationSlot); - if (shuffling === null) { - // this may come from an out-of-synced node, the spec did not define it so should not REJECT - // see https://github.com/ChainSafe/lodestar/issues/4396 - throw new AttestationError(GossipAction.IGNORE, { - code: AttestationErrorCode.NO_COMMITTEE_FOR_SLOT_AND_INDEX, - index: attestationIndex, - slot: attestationSlot, - }); - } - const {committees} = shuffling; const slotCommittees = committees[attestationSlot % SLOTS_PER_EPOCH]; @@ -710,9 +778,8 @@ export function getCommitteeIndices( /** * Compute the correct subnet for a slot/committee index */ -export function computeSubnetForSlot(state: CachedBeaconStateAllForks, slot: number, committeeIndex: number): number { +export function computeSubnetForSlot(shuffling: EpochShuffling, slot: number, committeeIndex: number): number { const slotsSinceEpochStart = slot % SLOTS_PER_EPOCH; - const committeesPerSlot = state.epochCtx.getCommitteeCountPerSlot(computeEpochAtSlot(slot)); - 
const committeesSinceEpochStart = committeesPerSlot * slotsSinceEpochStart; + const committeesSinceEpochStart = shuffling.committeesPerSlot * slotsSinceEpochStart; return (committeesSinceEpochStart + committeeIndex) % ATTESTATION_SUBNET_COUNT; } diff --git a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts index 099590ee019..7b4674b3a86 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts @@ -3,32 +3,35 @@ import {DOMAIN_AGGREGATE_AND_PROOF} from "@lodestar/params"; import {ssz} from "@lodestar/types"; import {Epoch, phase0} from "@lodestar/types"; import { - CachedBeaconStateAllForks, computeSigningRoot, computeStartSlotAtEpoch, createSingleSignatureSetFromComponents, ISignatureSet, } from "@lodestar/state-transition"; +import {BeaconConfig} from "@lodestar/config"; export function getAggregateAndProofSigningRoot( - state: CachedBeaconStateAllForks, + config: BeaconConfig, epoch: Epoch, aggregateAndProof: phase0.SignedAggregateAndProof ): Uint8Array { + // previously, we call `const aggregatorDomain = state.config.getDomain(state.slot, DOMAIN_AGGREGATE_AND_PROOF, slot);` + // at fork boundary, it's required to dial to target epoch https://github.com/ChainSafe/lodestar/blob/v1.11.3/packages/beacon-node/src/chain/validation/attestation.ts#L573 + // instead of that, just use the slot in the attestation data const slot = computeStartSlotAtEpoch(epoch); - const aggregatorDomain = state.config.getDomain(state.slot, DOMAIN_AGGREGATE_AND_PROOF, slot); + const aggregatorDomain = config.getDomain(slot, DOMAIN_AGGREGATE_AND_PROOF); return computeSigningRoot(ssz.phase0.AggregateAndProof, aggregateAndProof.message, aggregatorDomain); } export function getAggregateAndProofSignatureSet( - state: CachedBeaconStateAllForks, + config: BeaconConfig, epoch: 
Epoch, aggregator: PublicKey, aggregateAndProof: phase0.SignedAggregateAndProof ): ISignatureSet { return createSingleSignatureSetFromComponents( aggregator, - getAggregateAndProofSigningRoot(state, epoch, aggregateAndProof), + getAggregateAndProofSigningRoot(config, epoch, aggregateAndProof), aggregateAndProof.signature ); } diff --git a/packages/beacon-node/src/chain/validation/signatureSets/selectionProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/selectionProof.ts index dbb8e338060..5da8a8a12da 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/selectionProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/selectionProof.ts @@ -1,27 +1,26 @@ import type {PublicKey} from "@chainsafe/bls/types"; import {DOMAIN_SELECTION_PROOF} from "@lodestar/params"; import {phase0, Slot, ssz} from "@lodestar/types"; -import { - CachedBeaconStateAllForks, - computeSigningRoot, - createSingleSignatureSetFromComponents, - ISignatureSet, -} from "@lodestar/state-transition"; +import {computeSigningRoot, createSingleSignatureSetFromComponents, ISignatureSet} from "@lodestar/state-transition"; +import {BeaconConfig} from "@lodestar/config"; -export function getSelectionProofSigningRoot(state: CachedBeaconStateAllForks, slot: Slot): Uint8Array { - const selectionProofDomain = state.config.getDomain(state.slot, DOMAIN_SELECTION_PROOF, slot); +export function getSelectionProofSigningRoot(config: BeaconConfig, slot: Slot): Uint8Array { + // previously, we call `const selectionProofDomain = config.getDomain(state.slot, DOMAIN_SELECTION_PROOF, slot)` + // at fork boundary, it's required to dial to target epoch https://github.com/ChainSafe/lodestar/blob/v1.11.3/packages/beacon-node/src/chain/validation/attestation.ts#L573 + // instead of that, just use the slot in the attestation data + const selectionProofDomain = config.getDomain(slot, DOMAIN_SELECTION_PROOF); return computeSigningRoot(ssz.Slot, slot, selectionProofDomain); } export 
function getSelectionProofSignatureSet( - state: CachedBeaconStateAllForks, + config: BeaconConfig, slot: Slot, aggregator: PublicKey, aggregateAndProof: phase0.SignedAggregateAndProof ): ISignatureSet { return createSingleSignatureSetFromComponents( aggregator, - getSelectionProofSigningRoot(state, slot), + getSelectionProofSigningRoot(config, slot), aggregateAndProof.message.selectionProof ); } diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts index 58b99f2a37e..07cc47fa54d 100644 --- a/packages/beacon-node/src/db/beacon.ts +++ b/packages/beacon-node/src/db/beacon.ts @@ -21,6 +21,7 @@ import { BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; +import {CheckpointStateRepository} from "./repositories/checkpointState.js"; export type BeaconDbModules = { config: ChainForkConfig; @@ -35,6 +36,7 @@ export class BeaconDb implements IBeaconDb { blobSidecarsArchive: BlobSidecarsArchiveRepository; stateArchive: StateArchiveRepository; + checkpointState: CheckpointStateRepository; voluntaryExit: VoluntaryExitRepository; proposerSlashing: ProposerSlashingRepository; @@ -67,6 +69,7 @@ export class BeaconDb implements IBeaconDb { this.blobSidecarsArchive = new BlobSidecarsArchiveRepository(config, db); this.stateArchive = new StateArchiveRepository(config, db); + this.checkpointState = new CheckpointStateRepository(config, db); this.voluntaryExit = new VoluntaryExitRepository(config, db); this.blsToExecutionChange = new BLSToExecutionChangeRepository(config, db); this.proposerSlashing = new ProposerSlashingRepository(config, db); diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 1a3abfa3362..5b0f1219e75 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -59,6 +59,7 @@ export enum Bucket { // 54 was for bestPartialLightClientUpdate, allocate 
a fresh one // lightClient_bestLightClientUpdate = 55, // SyncPeriod -> LightClientUpdate // DEPRECATED on v1.5.0 lightClient_bestLightClientUpdate = 56, // SyncPeriod -> [Slot, LightClientUpdate] + allForks_checkpointState = 57, // Root -> allForks.BeaconState } export function getBucketNameByValue(enumValue: T): keyof typeof Bucket { diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts index 58bf25c57aa..6936cbd0c38 100644 --- a/packages/beacon-node/src/db/interface.ts +++ b/packages/beacon-node/src/db/interface.ts @@ -19,6 +19,7 @@ import { BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; +import {CheckpointStateRepository} from "./repositories/checkpointState.js"; /** * The DB service manages the data layer of the beacon chain @@ -36,6 +37,8 @@ export interface IBeaconDb { // finalized states stateArchive: StateArchiveRepository; + // temporary checkpoint states + checkpointState: CheckpointStateRepository; // op pool voluntaryExit: VoluntaryExitRepository; diff --git a/packages/beacon-node/src/db/repositories/checkpointState.ts b/packages/beacon-node/src/db/repositories/checkpointState.ts new file mode 100644 index 00000000000..8848f4d26d3 --- /dev/null +++ b/packages/beacon-node/src/db/repositories/checkpointState.ts @@ -0,0 +1,31 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {Db, Repository} from "@lodestar/db"; +import {BeaconStateAllForks} from "@lodestar/state-transition"; +import {ssz} from "@lodestar/types"; +import {Bucket, getBucketNameByValue} from "../buckets.js"; + +/** + * Store temporary checkpoint states. + * We should only put/get binary data from this repository, consumer will load it into an existing state ViewDU object. + */ +export class CheckpointStateRepository extends Repository { + constructor(config: ChainForkConfig, db: Db) { + // Pick some type but won't be used. 
Casted to any because no type can match `BeaconStateAllForks` + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any + const type = ssz.phase0.BeaconState as any; + const bucket = Bucket.allForks_checkpointState; + super(config, db, bucket, type, getBucketNameByValue(bucket)); + } + + getId(): Uint8Array { + throw Error("CheckpointStateRepository does not work with value"); + } + + encodeValue(): Uint8Array { + throw Error("CheckpointStateRepository does not work with value"); + } + + decodeValue(): BeaconStateAllForks { + throw Error("CheckpointStateRepository does not work with value"); + } +} diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index 8b8ce0f0c2b..b2461d0acca 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -584,6 +584,26 @@ export function createLodestarMetrics( labelNames: ["caller"], buckets: [0, 1, 2, 4, 8, 16, 32, 64], }), + shufflingHit: register.gauge<"caller">({ + name: "lodestar_gossip_attestation_shuffling_hit_count", + help: "Count of gossip attestation verification shuffling hit", + labelNames: ["caller"], + }), + shufflingMiss: register.gauge<"caller">({ + name: "lodestar_gossip_attestation_shuffling_miss_count", + help: "Count of gossip attestation verification shuffling miss", + labelNames: ["caller"], + }), + shufflingRegenHit: register.gauge<"caller">({ + name: "lodestar_gossip_attestation_shuffling_regen_hit_count", + help: "Count of gossip attestation verification shuffling regen hit", + labelNames: ["caller"], + }), + shufflingRegenMiss: register.gauge<"caller">({ + name: "lodestar_gossip_attestation_shuffling_regen_miss_count", + help: "Count of gossip attestation verification shuffling regen miss", + labelNames: ["caller"], + }), attestationSlotToClockSlot: register.histogram<"caller">({ name: 
"lodestar_gossip_attestation_attestation_slot_to_clock_slot", help: "Slot distance between clock slot and attestation slot", @@ -1020,13 +1040,15 @@ export function createLodestarMetrics( name: "lodestar_cp_state_cache_adds_total", help: "Total number of items added in checkpoint state cache", }), - size: register.gauge({ + size: register.gauge<"type">({ name: "lodestar_cp_state_cache_size", help: "Checkpoint state cache size", + labelNames: ["type"], }), - epochSize: register.gauge({ + epochSize: register.gauge<"type">({ name: "lodestar_cp_state_epoch_size", help: "Checkpoint state cache size", + labelNames: ["type"], }), reads: register.avgMinMax({ name: "lodestar_cp_state_epoch_reads", @@ -1041,6 +1063,40 @@ export function createLodestarMetrics( help: "Histogram of cloned count per state every time state.clone() is called", buckets: [1, 2, 5, 10, 50, 250], }), + statePersistDuration: register.histogram({ + name: "lodestar_cp_state_cache_state_persist_seconds", + help: "Histogram of time to persist state to disk", + buckets: [0.5, 1, 2, 4], + }), + statePruneFromMemoryCount: register.gauge({ + name: "lodestar_cp_state_cache_state_prune_from_memory_count", + help: "Total number of states pruned from memory", + }), + statePersistSecFromSlot: register.histogram({ + name: "lodestar_cp_state_cache_state_persist_seconds_from_slot", + help: "Histogram of time to persist state to disk from slot", + buckets: [0, 4, 8, 12], + }), + stateReloadDuration: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_seconds", + help: "Histogram of time to load state from disk", + buckets: [2, 4, 6, 8], + }), + stateReloadEpochDiff: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_epoch_diff", + help: "Histogram of epoch difference between seed state epoch and loaded state epoch", + buckets: [0, 1, 2, 4, 8, 16, 32], + }), + stateReloadSecFromSlot: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_seconds_from_slot", + help:
"Histogram of time to load state from disk from slot", + buckets: [0, 4, 8, 12], + }), + stateRemoveCount: register.gauge<"reason">({ + name: "lodestar_cp_state_cache_state_remove_count", + help: "Total number of persisted states removed", + labelNames: ["reason"], + }), }, balancesCache: { @@ -1059,6 +1115,13 @@ export function createLodestarMetrics( }), }, + shufflingCache: { + size: register.gauge({ + name: "lodestar_shuffling_cache_size", + help: "Shuffling cache size", + }), + }, + seenCache: { aggregatedAttestations: { superSetCheckTotal: register.histogram({ diff --git a/packages/beacon-node/src/util/array.ts b/packages/beacon-node/src/util/array.ts index 72f81fbee72..30723a1b036 100644 --- a/packages/beacon-node/src/util/array.ts +++ b/packages/beacon-node/src/util/array.ts @@ -45,6 +45,9 @@ export class LinkedList { return this._length; } + /** + * Add to the end of the list + */ push(data: T): void { if (this._length === 0) { this.tail = this.head = new Node(data); @@ -64,6 +67,9 @@ export class LinkedList { this._length++; } + /** + * Add to the beginning of the list + */ unshift(data: T): void { if (this._length === 0) { this.tail = this.head = new Node(data); @@ -173,6 +179,22 @@ export class LinkedList { return false; } + /** + * Move an existing item to the head of the list. + * If the item is not found, do nothing. 
+ */ + moveToHead(item: T): void { + // if this is head, do nothing + if (this.head?.data === item) { + return; + } + + const found = this.deleteFirst(item); + if (found) { + this.unshift(item); + } + } + next(): IteratorResult { if (!this.pointer) { return {done: true, value: undefined}; diff --git a/packages/beacon-node/src/util/file.ts b/packages/beacon-node/src/util/file.ts index af78ca8b612..bc8d1fe8bcc 100644 --- a/packages/beacon-node/src/util/file.ts +++ b/packages/beacon-node/src/util/file.ts @@ -1,6 +1,8 @@ import fs from "node:fs"; import {promisify} from "node:util"; +// TODO: use @lodestar/util instead + /** Ensure a directory exists */ export async function ensureDir(path: string): Promise { try { diff --git a/packages/beacon-node/src/util/multifork.ts b/packages/beacon-node/src/util/multifork.ts index 81b4921a0a4..2b84fd86861 100644 --- a/packages/beacon-node/src/util/multifork.ts +++ b/packages/beacon-node/src/util/multifork.ts @@ -1,8 +1,9 @@ import {ChainForkConfig} from "@lodestar/config"; -import {allForks} from "@lodestar/types"; +import {Slot, allForks} from "@lodestar/types"; import {bytesToInt} from "@lodestar/utils"; import {getSlotFromSignedBeaconBlockSerialized} from "./sszBytes.js"; +// TODO: merge to sszBytes.ts util /** * Slot uint64 */ @@ -36,10 +37,14 @@ export function getStateTypeFromBytes( config: ChainForkConfig, bytes: Buffer | Uint8Array ): allForks.AllForksSSZTypes["BeaconState"] { - const slot = bytesToInt(bytes.subarray(SLOT_BYTES_POSITION_IN_STATE, SLOT_BYTES_POSITION_IN_STATE + SLOT_BYTE_COUNT)); + const slot = getStateSlotFromBytes(bytes); return config.getForkTypes(slot).BeaconState; } +export function getStateSlotFromBytes(bytes: Uint8Array): Slot { + return bytesToInt(bytes.subarray(SLOT_BYTES_POSITION_IN_STATE, SLOT_BYTES_POSITION_IN_STATE + SLOT_BYTE_COUNT)); +} + /** * First field in update is beacon, first field in beacon is slot * diff --git a/packages/beacon-node/src/util/sszBytes.ts 
b/packages/beacon-node/src/util/sszBytes.ts index 0c258df3504..9b3ec27ee15 100644 --- a/packages/beacon-node/src/util/sszBytes.ts +++ b/packages/beacon-node/src/util/sszBytes.ts @@ -1,10 +1,14 @@ import {BitArray, deserializeUint8ArrayBitListFromBytes} from "@chainsafe/ssz"; -import {BLSSignature, RootHex, Slot} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; +import {BLSSignature, RootHex, Slot, ssz} from "@lodestar/types"; import {toHex} from "@lodestar/utils"; +import {getStateTypeFromBytes} from "./multifork.js"; export type BlockRootHex = RootHex; export type AttDataBase64 = string; +// TODO: deduplicate with packages/state-transition/src/util/sszBytes.ts + // class Attestation(Container): // aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] - offset 4 // data: AttestationData - target data - 128 @@ -204,6 +208,52 @@ export function getSlotFromSignedBlobSidecarSerialized(data: Uint8Array): Slot | return getSlotFromOffset(data, SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR); } +type BeaconStateType = + | typeof ssz.phase0.BeaconState + | typeof ssz.altair.BeaconState + | typeof ssz.bellatrix.BeaconState + | typeof ssz.capella.BeaconState + | typeof ssz.deneb.BeaconState; + +export function getValidatorsBytesFromStateBytes(config: ChainForkConfig, stateBytes: Uint8Array): Uint8Array { + const stateType = getStateTypeFromBytes(config, stateBytes) as BeaconStateType; + const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); + const fieldRanges = stateType.getFieldRanges(dataView, 0, stateBytes.length); + const allFields = Object.keys(stateType.fields); + const validatorsFieldIndex = allFields.indexOf("validators"); + const validatorsRange = fieldRanges[validatorsFieldIndex]; + return stateBytes.slice(validatorsRange.start, validatorsRange.end); +} + +/** + * 48 + 32 + 8 + 1 + 8 + 8 + 8 + 8 = 121 + * ``` + * class Validator(Container): + pubkey: BLSPubkey [fixed - 48 bytes] + 
withdrawal_credentials: Bytes32 [fixed - 32 bytes] + effective_balance: Gwei [fixed - 8 bytes] + slashed: boolean [fixed - 1 byte] + # Status epochs + activation_eligibility_epoch: Epoch [fixed - 8 bytes] + activation_epoch: Epoch [fixed - 8 bytes] + exit_epoch: Epoch [fixed - 8 bytes] + withdrawable_epoch: Epoch [fixed - 8 bytes] + ``` + */ +const VALIDATOR_BYTES_SIZE = 121; +const BLS_PUBKEY_SIZE = 48; + +export function getWithdrawalCredentialFirstByteFromValidatorBytes( + validatorBytes: Uint8Array, + validatorIndex: number +): number | null { + if (validatorBytes.length < VALIDATOR_BYTES_SIZE * (validatorIndex + 1)) { + return null; + } + + return validatorBytes[VALIDATOR_BYTES_SIZE * validatorIndex + BLS_PUBKEY_SIZE]; +} + function getSlotFromOffset(data: Uint8Array, offset: number): Slot { // TODO: Optimize const dv = new DataView(data.buffer, data.byteOffset, data.byteLength); diff --git a/packages/beacon-node/test/perf/chain/stateCache/stateContextCheckpointsCache.test.ts b/packages/beacon-node/test/perf/chain/stateCache/stateContextCheckpointsCache.test.ts index cf0ab1fa16b..0ac43b3c7a3 100644 --- a/packages/beacon-node/test/perf/chain/stateCache/stateContextCheckpointsCache.test.ts +++ b/packages/beacon-node/test/perf/chain/stateCache/stateContextCheckpointsCache.test.ts @@ -2,7 +2,14 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {ssz, phase0} from "@lodestar/types"; import {generateCachedState} from "../../../utils/state.js"; -import {CheckpointStateCache, toCheckpointHex} from "../../../../src/chain/stateCache/index.js"; +import { + CheckpointStateCache, + PersistentCheckpointStateCache, + toCheckpointHex, +} from "../../../../src/chain/stateCache/index.js"; +import {ShufflingCache} from "../../../../src/chain/shufflingCache.js"; +import {testLogger} from "../../../utils/logger.js"; +import {getTestPersistentApi} from "../../../utils/persistent.js"; 
describe("CheckpointStateCache perf tests", function () { setBenchOpts({noThreshold: true}); @@ -12,7 +19,10 @@ describe("CheckpointStateCache perf tests", function () { let checkpointStateCache: CheckpointStateCache; before(() => { - checkpointStateCache = new CheckpointStateCache({}); + checkpointStateCache = new PersistentCheckpointStateCache( + {logger: testLogger(), shufflingCache: new ShufflingCache(), persistentApis: getTestPersistentApi(new Map())}, + {maxEpochsInMemory: 2} + ); state = generateCachedState(); checkpoint = ssz.phase0.Checkpoint.defaultValue(); }); diff --git a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts index 21b70c69a42..d99a0570801 100644 --- a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts +++ b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts @@ -89,6 +89,8 @@ describe.skip("verify+import blocks - range sync perf test", () => { skipCreateStateCacheIfAvailable: true, archiveStateEpochFrequency: 1024, minSameMessageSignatureSetsToBatch: 32, + maxStates: 32, + maxEpochsInMemory: 2, }, { config: state.config, diff --git a/packages/beacon-node/test/unit/chain/stateCache/lruBlockStateCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/lruBlockStateCache.test.ts new file mode 100644 index 00000000000..7fc64f1263c --- /dev/null +++ b/packages/beacon-node/test/unit/chain/stateCache/lruBlockStateCache.test.ts @@ -0,0 +1,79 @@ +import {expect} from "chai"; +import {toHexString} from "@chainsafe/ssz"; +import {EpochShuffling} from "@lodestar/state-transition"; +import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {Root} from "@lodestar/types"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition/src/types.js"; +import {LRUBlockStateCache} from "../../../../src/chain/stateCache/index.js"; +import {generateCachedState} from "../../../utils/state.js"; + +describe("LRUBlockStateCache", function () { + let 
cache: LRUBlockStateCache; + const shuffling: EpochShuffling = { + epoch: 0, + activeIndices: [], + shuffling: [], + committees: [], + committeesPerSlot: 1, + }; + + const state1 = generateCachedState({slot: 0}); + const key1 = state1.hashTreeRoot(); + state1.epochCtx.currentShuffling = {...shuffling, epoch: 0}; + + const state2 = generateCachedState({slot: 1 * SLOTS_PER_EPOCH}); + const key2 = state2.hashTreeRoot(); + state2.epochCtx.currentShuffling = {...shuffling, epoch: 1}; + + const state3 = generateCachedState({slot: 2 * SLOTS_PER_EPOCH}); + const key3 = state3.hashTreeRoot(); + state3.epochCtx.currentShuffling = {...shuffling, epoch: 2}; + + beforeEach(function () { + // max 2 items + cache = new LRUBlockStateCache({maxStates: 2}, {}); + cache.add(state1); + cache.add(state2); + }); + + const pruneTestCases: { + name: string; + lastAddedState: CachedBeaconStateAllForks; + keptStates: Root[]; + prunedStates: Root[]; + }[] = [ + { + name: "should prune key1", + lastAddedState: state2, + keptStates: [key3, key2], + prunedStates: [key1], + }, + { + name: "should prune key2", + lastAddedState: state1, + keptStates: [key3, key1], + prunedStates: [key2], + }, + ]; + + for (const {name, lastAddedState, keptStates, prunedStates} of pruneTestCases) { + it(name, () => { + // move to head this state + cache.add(lastAddedState); + expect(cache.size).to.be.equal(2, "Size must be same as initial 2"); + expect(cache.size).to.be.equal(2, "Size should reduce to initial 2 after prunning"); + cache.add(state3); + for (const key of keptStates) { + expect(cache.get(toHexString(key)), `must have key ${toHexString(key)}`).to.be.not.null; + } + for (const key of prunedStates) { + expect(cache.get(toHexString(key)), `must not have key ${toHexString(key)}`).to.be.null; + } + }); + } + + it("should deleteAllBeforeEpoch", function () { + cache.deleteAllBeforeEpoch(2); + expect(cache.size).to.be.equal(0, "size must be 0 after delete all"); + }); +}); diff --git 
a/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts new file mode 100644 index 00000000000..0579a51224d --- /dev/null +++ b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts @@ -0,0 +1,273 @@ +import {expect} from "chai"; +import {SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; +import {CachedBeaconStateAllForks, computeStartSlotAtEpoch} from "@lodestar/state-transition"; +import {Epoch, phase0} from "@lodestar/types"; +import {mapValues} from "@lodestar/utils"; +import { + PersistentCheckpointStateCache, + findClosestCheckpointState, + toCheckpointHex, + toCheckpointKey, +} from "../../../../src/chain/stateCache/persistentCheckpointsCache.js"; +import {generateCachedState} from "../../../utils/state.js"; +import {ShufflingCache} from "../../../../src/chain/shufflingCache.js"; +import {testLogger} from "../../../utils/logger.js"; +import {CheckpointHex} from "../../../../src/chain/stateCache/types.js"; +import {getTestPersistentApi} from "../../../utils/persistent.js"; +import {PersistentKey} from "../../../../src/chain/stateCache/persistent/types.js"; + +describe("PersistentCheckpointStateCache", function () { + let cache: PersistentCheckpointStateCache; + let fileApisBuffer: Map; + const root0a = Buffer.alloc(32); + const root0b = Buffer.alloc(32); + root0b[31] = 1; + // root0a is of the last slot of epoch 19 + const cp0a = {epoch: 20, root: root0a}; + // root0b is of the first slot of epoch 20 + const cp0b = {epoch: 20, root: root0b}; + const cp1 = {epoch: 21, root: Buffer.alloc(32, 1)}; + const cp2 = {epoch: 22, root: Buffer.alloc(32, 2)}; + const [cp0aHex, cp0bHex, cp1Hex, cp2Hex] = [cp0a, cp0b, cp1, cp2].map((cp) => toCheckpointHex(cp)); + const [cp0aKey, cp0bKey, cp1Key, cp2Key] = [cp0aHex, cp0bHex, cp1Hex, cp2Hex].map((cp) => toCheckpointKey(cp)); + const allStates = [cp0a, cp0b, cp1, 
cp2] + .map((cp) => generateCachedState({slot: cp.epoch * SLOTS_PER_EPOCH})) + .map((state) => { + const startSlotEpoch20 = computeStartSlotAtEpoch(20); + state.blockRoots.set((startSlotEpoch20 - 1) % SLOTS_PER_HISTORICAL_ROOT, root0a); + state.blockRoots.set(startSlotEpoch20 % SLOTS_PER_HISTORICAL_ROOT, root0b); + return state; + }); + + const states = { + cp0a: allStates[0], + cp0b: allStates[1], + cp1: allStates[2], + cp2: allStates[3], + }; + const stateBytes = mapValues(states, (state) => state.serialize()); + + beforeEach(() => { + fileApisBuffer = new Map(); + const persistentApis = getTestPersistentApi(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {persistentApis, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxEpochsInMemory: 2} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + cache.add(cp1, states["cp1"]); + }); + + it("getLatest", () => { + // cp0 + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch)?.hashTreeRoot()).to.be.deep.equal( + states["cp0a"].hashTreeRoot() + ); + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch + 1)?.hashTreeRoot()).to.be.deep.equal( + states["cp0a"].hashTreeRoot() + ); + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch - 1)?.hashTreeRoot()).to.be.undefined; + + // cp1 + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch)?.hashTreeRoot()).to.be.deep.equal(states["cp1"].hashTreeRoot()); + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch + 1)?.hashTreeRoot()).to.be.deep.equal( + states["cp1"].hashTreeRoot() + ); + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch - 1)?.hashTreeRoot()).to.be.undefined; + + // cp2 + expect(cache.getLatest(cp2Hex.rootHex, cp2.epoch)?.hashTreeRoot()).to.be.undefined; + }); + + it("getOrReloadLatest", async () => { + cache.add(cp2, states["cp2"]); + expect(await cache.pruneFromMemory()).to.be.equal(1); + // cp0b is persisted + expect(fileApisBuffer.size).to.be.equal(1); + 
expect(Array.from(fileApisBuffer.keys())).to.be.deep.equal([cp0bKey]); + + // getLatest() does not reload from disk + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch)).to.be.null; + expect(cache.getLatest(cp0bHex.rootHex, cp0b.epoch)).to.be.null; + + // cp0a has the root from previous epoch so we only prune it from db + expect(await cache.getOrReloadLatest(cp0aHex.rootHex, cp0a.epoch)).to.be.null; + // but getOrReloadLatest() does for cp0b + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch))?.serialize()).to.be.deep.equal( + stateBytes["cp0b"] + ); + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch + 1))?.serialize()).to.be.deep.equal( + stateBytes["cp0b"] + ); + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch - 1))?.serialize()).to.be.undefined; + }); + + const pruneTestCases: { + name: string; + cpDelete: phase0.Checkpoint | null; + cpKeyPersisted: string; + stateBytesPersisted: Uint8Array; + }[] = [ + /** + * This replicates the scenario that 1st slot of epoch is NOT skipped + * - cp0a has the root from previous epoch so we only prune it from db + * - cp0b has the root of 1st slot of epoch 20 so we prune it from db and persist to disk + */ + { + name: "pruneFromMemory: should prune epoch 20 states from memory and persist cp0b to disk", + cpDelete: null, + cpKeyPersisted: cp0bKey, + stateBytesPersisted: stateBytes["cp0b"], + }, + /** + * This replicates the scenario that 1st slot of epoch is skipped + * - cp0a has the root from previous epoch but since 1st slot of epoch 20 is skipped, it's the checkpoint state + * and we want to prune it from memory and persist to disk + */ + { + name: "pruneFromMemory: should prune epoch 20 states from memory and persist cp0a to disk", + cpDelete: cp0b, + cpKeyPersisted: cp0aKey, + stateBytesPersisted: stateBytes["cp0a"], + }, + ]; + + for (const {name, cpDelete, cpKeyPersisted, stateBytesPersisted} of pruneTestCases) { + it(name, async function () { + 
expect(fileApisBuffer.size).to.be.equal(0); + expect(cache.get(cp0aHex)).to.be.not.null; + expect(cache.get(cp0bHex)).to.be.not.null; + if (cpDelete) cache.delete(cpDelete); + cache.add(cp2, states["cp2"]); + await cache.pruneFromMemory(); + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + expect(cache.get(cp1Hex)?.hashTreeRoot()).to.be.deep.equal(states["cp1"].hashTreeRoot()); + expect(cache.get(cp2Hex)?.hashTreeRoot()).to.be.deep.equal(states["cp2"].hashTreeRoot()); + expect(fileApisBuffer.size).to.be.equal(1); + expect(Array.from(fileApisBuffer.keys())).to.be.deep.equal([cpKeyPersisted]); + expect(fileApisBuffer.get(cpKeyPersisted)).to.be.deep.equal(stateBytesPersisted); + }); + } + + const reloadTestCases: { + name: string; + cpDelete: phase0.Checkpoint | null; + cpKeyPersisted: CheckpointHex; + stateBytesPersisted: Uint8Array; + cpKeyPersisted2: CheckpointHex; + stateBytesPersisted2: Uint8Array; + }[] = [ + // both cp0a and cp0b are from lowest epoch but only cp0b is persisted because it has the root of 1st slot of epoch 20 + { + name: "getOrReload cp0b from disk", + cpDelete: null, + cpKeyPersisted: cp0bHex, + stateBytesPersisted: stateBytes["cp0b"], + cpKeyPersisted2: cp1Hex, + stateBytesPersisted2: stateBytes["cp1"], + }, + // although cp0a has the root of previous epoch, it's the checkpoint state so we want to reload it from disk + { + name: "getOrReload cp0a from disk", + cpDelete: cp0b, + cpKeyPersisted: cp0aHex, + stateBytesPersisted: stateBytes["cp0a"], + cpKeyPersisted2: cp1Hex, + stateBytesPersisted2: stateBytes["cp1"], + }, + ]; + + for (const { + name, + cpDelete, + cpKeyPersisted, + stateBytesPersisted, + cpKeyPersisted2, + stateBytesPersisted2, + } of reloadTestCases) { + it(name, async function () { + if (cpDelete) cache.delete(cpDelete); + expect(fileApisBuffer.size).to.be.equal(0); + cache.add(cp2, states["cp2"]); + expect(await cache.pruneFromMemory()).to.be.equal(1); + 
expect(cache.get(cp2Hex)?.hashTreeRoot()).to.be.deep.equal(states["cp2"].hashTreeRoot()); + expect(fileApisBuffer.size).to.be.equal(1); + const persistedKey0 = toCheckpointKey(cpKeyPersisted); + expect(Array.from(fileApisBuffer.keys())).to.be.deep.equal([persistedKey0], "incorrect persisted keys"); + expect(fileApisBuffer.get(persistedKey0)).to.be.deep.equal(stateBytesPersisted); + expect(await cache.getStateOrBytes(cpKeyPersisted)).to.be.deep.equal(stateBytesPersisted); + // simple get() does not reload from disk + expect(cache.get(cpKeyPersisted)).to.be.null; + // reload cpKeyPersisted from disk + expect((await cache.getOrReload(cpKeyPersisted))?.serialize()).to.be.deep.equal(stateBytesPersisted); + expect(await cache.pruneFromMemory()).to.be.equal(1); + // check the 2nd persisted checkpoint + const persistedKey2 = toCheckpointKey(cpKeyPersisted2); + expect(Array.from(fileApisBuffer.keys())).to.be.deep.equal([persistedKey2], "incorrect persisted keys"); + expect(fileApisBuffer.get(persistedKey2)).to.be.deep.equal(stateBytesPersisted2); + expect(await cache.getStateOrBytes(cpKeyPersisted2)).to.be.deep.equal(stateBytesPersisted2); + }); + } + + it("pruneFinalized", async function () { + cache.add(cp2, states["cp2"]); + await cache.pruneFromMemory(); + // cp0 is persisted + expect(fileApisBuffer.size).to.be.equal(1); + expect(Array.from(fileApisBuffer.keys())).to.be.deep.equal([cp0bKey]); + // cp1 is in memory + expect(cache.get(cp1Hex)).to.be.not.null; + // cp2 is in memory + expect(cache.get(cp2Hex)).to.be.not.null; + // finalize epoch cp2 + cache.pruneFinalized(cp2.epoch); + expect(fileApisBuffer.size).to.be.equal(0); + expect(cache.get(cp1Hex)).to.be.null; + expect(cache.get(cp2Hex)).to.be.not.null; + await cache.pruneFromMemory(); + }); + + describe("findClosestCheckpointState", function () { + const cacheMap = new Map(); + cacheMap.set(cp0aKey, states["cp0a"]); + cacheMap.set(cp1Key, states["cp1"]); + cacheMap.set(cp2Key, states["cp2"]); + const testCases: 
{name: string; epoch: Epoch; expectedState: CachedBeaconStateAllForks}[] = [ + { + name: "should return cp0 for epoch less than cp0", + epoch: 19, + expectedState: states["cp0a"], + }, + { + name: "should return cp0 for epoch same to cp0", + epoch: 20, + expectedState: states["cp0a"], + }, + { + name: "should return cp1 for epoch same to cp1", + epoch: 21, + expectedState: states["cp1"], + }, + { + name: "should return cp2 for epoch same to cp2", + epoch: 22, + expectedState: states["cp2"], + }, + { + name: "should return cp2 for epoch greater than cp2", + epoch: 23, + expectedState: states["cp2"], + }, + ]; + + for (const {name, epoch, expectedState} of testCases) { + it(name, function () { + const cpHex = toCheckpointHex({epoch, root: Buffer.alloc(32)}); + const state = findClosestCheckpointState(cpHex, cacheMap); + expect(state?.hashTreeRoot()).to.be.deep.equal(expectedState.hashTreeRoot()); + }); + } + }); +}); diff --git a/packages/beacon-node/test/unit/chain/stateCache/stateContextCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/stateContextCache.test.ts deleted file mode 100644 index 2ad38f8e93c..00000000000 --- a/packages/beacon-node/test/unit/chain/stateCache/stateContextCache.test.ts +++ /dev/null @@ -1,51 +0,0 @@ -import {expect} from "chai"; -import {toHexString} from "@chainsafe/ssz"; -import {EpochShuffling} from "@lodestar/state-transition"; -import {SLOTS_PER_EPOCH} from "@lodestar/params"; -import {Root} from "@lodestar/types"; -import {StateContextCache} from "../../../../src/chain/stateCache/index.js"; -import {generateCachedState} from "../../../utils/state.js"; -import {ZERO_HASH} from "../../../../src/constants/index.js"; - -describe("StateContextCache", function () { - let cache: StateContextCache; - let key1: Root, key2: Root; - const shuffling: EpochShuffling = { - epoch: 0, - activeIndices: [], - shuffling: [], - committees: [], - committeesPerSlot: 1, - }; - - beforeEach(function () { - // max 2 items - cache = new 
StateContextCache({maxStates: 2}); - const state1 = generateCachedState({slot: 0}); - key1 = state1.hashTreeRoot(); - state1.epochCtx.currentShuffling = {...shuffling, epoch: 0}; - cache.add(state1); - const state2 = generateCachedState({slot: 1 * SLOTS_PER_EPOCH}); - key2 = state2.hashTreeRoot(); - state2.epochCtx.currentShuffling = {...shuffling, epoch: 1}; - cache.add(state2); - }); - - it("should prune", function () { - expect(cache.size).to.be.equal(2, "Size must be same as initial 2"); - const state3 = generateCachedState({slot: 2 * SLOTS_PER_EPOCH}); - state3.epochCtx.currentShuffling = {...shuffling, epoch: 2}; - - cache.add(state3); - expect(cache.size).to.be.equal(3, "Size must be 2+1 after .add()"); - cache.prune(toHexString(ZERO_HASH)); - expect(cache.size).to.be.equal(2, "Size should reduce to initial 2 after prunning"); - expect(cache.get(toHexString(key1)), "must have key1").to.be.not.undefined; - expect(cache.get(toHexString(key2)), "must have key2").to.be.not.undefined; - }); - - it("should deleteAllBeforeEpoch", function () { - cache.deleteAllBeforeEpoch(2); - expect(cache.size).to.be.equal(0, "size must be 0 after delete all"); - }); -}); diff --git a/packages/beacon-node/test/unit/chain/validation/aggregateAndProof.test.ts b/packages/beacon-node/test/unit/chain/validation/aggregateAndProof.test.ts index 20300776a2e..a5b192abee4 100644 --- a/packages/beacon-node/test/unit/chain/validation/aggregateAndProof.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/aggregateAndProof.test.ts @@ -14,6 +14,7 @@ import { AggregateAndProofValidDataOpts, } from "../../../utils/validationData/aggregateAndProof.js"; import {IStateRegenerator} from "../../../../src/chain/regen/interface.js"; +import {ShufflingCache} from "../../../../src/chain/shufflingCache.js"; describe("chain / validation / aggregateAndProof", () => { const vc = 64; @@ -123,6 +124,12 @@ describe("chain / validation / aggregateAndProof", () => { (chain as {regen: 
IStateRegenerator}).regen = { getState: async () => committeeState, } as Partial as IStateRegenerator; + class NoOpShufflingCache extends ShufflingCache { + processState(): void { + // do nothing + } + } + (chain as {shufflingCache: ShufflingCache}).shufflingCache = new NoOpShufflingCache(); await expectError(chain, signedAggregateAndProof, AttestationErrorCode.NO_COMMITTEE_FOR_SLOT_AND_INDEX); }); diff --git a/packages/beacon-node/test/unit/chain/validation/attestation.test.ts b/packages/beacon-node/test/unit/chain/validation/attestation.test.ts index 36f4ddf54a6..a950345f816 100644 --- a/packages/beacon-node/test/unit/chain/validation/attestation.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/attestation.test.ts @@ -5,10 +5,10 @@ import type {PublicKey, SecretKey} from "@chainsafe/bls/types"; import bls from "@chainsafe/bls"; import {ForkName, SLOTS_PER_EPOCH} from "@lodestar/params"; import {defaultChainConfig, createChainForkConfig, BeaconConfig} from "@lodestar/config"; -import {ProtoBlock} from "@lodestar/fork-choice"; +import {EpochDifference, ForkChoice, ProtoBlock} from "@lodestar/fork-choice"; // eslint-disable-next-line import/no-relative-packages -import {SignatureSetType, computeEpochAtSlot, computeStartSlotAtEpoch, processSlots} from "@lodestar/state-transition"; -import {Slot, ssz} from "@lodestar/types"; +import {EpochShuffling, SignatureSetType, computeStartSlotAtEpoch, processSlots} from "@lodestar/state-transition"; +import {ssz} from "@lodestar/types"; // eslint-disable-next-line import/no-relative-packages import {generateTestCachedBeaconStateOnlyValidators} from "../../../../../state-transition/test/perf/util.js"; import {IBeaconChain} from "../../../../src/chain/index.js"; @@ -21,23 +21,23 @@ import { import { ApiAttestation, GossipAttestation, - getStateForAttestationVerification, validateApiAttestation, Step0Result, validateAttestation, validateGossipAttestationsSameAttData, + getShufflingForAttestationVerification, } from 
"../../../../src/chain/validation/index.js"; import {expectRejectedWithLodestarError} from "../../../utils/errors.js"; import {memoOnce} from "../../../utils/cache.js"; import {getAttestationValidData, AttestationValidDataOpts} from "../../../utils/validationData/attestation.js"; import {IStateRegenerator, RegenCaller} from "../../../../src/chain/regen/interface.js"; -import {StateRegenerator} from "../../../../src/chain/regen/regen.js"; import {ZERO_HASH_HEX} from "../../../../src/constants/constants.js"; import {QueuedStateRegenerator} from "../../../../src/chain/regen/queued.js"; import {BlsSingleThreadVerifier} from "../../../../src/chain/bls/singleThread.js"; import {SeenAttesters} from "../../../../src/chain/seenCache/seenAttesters.js"; import {getAttDataBase64FromAttestationSerialized} from "../../../../src/util/sszBytes.js"; +import {ShufflingCache} from "../../../../src/chain/shufflingCache.js"; describe("validateGossipAttestationsSameAttData", () => { // phase0Result specifies whether the attestation is valid in phase0 @@ -349,6 +349,12 @@ describe("validateAttestation", () => { (chain as {regen: IStateRegenerator}).regen = { getState: async () => committeeState, } as Partial as IStateRegenerator; + class NoOpShufflingCache extends ShufflingCache { + processState(): void { + // do nothing + } + } + (chain as {shufflingCache: ShufflingCache}).shufflingCache = new NoOpShufflingCache(); const serializedData = ssz.phase0.Attestation.serialize(attestation); await expectApiError( @@ -478,71 +484,109 @@ describe("validateAttestation", () => { } }); -describe("getStateForAttestationVerification", () => { +describe("getShufflingForAttestationVerification", () => { // eslint-disable-next-line @typescript-eslint/naming-convention const config = createChainForkConfig({...defaultChainConfig, CAPELLA_FORK_EPOCH: 2}); const sandbox = sinon.createSandbox(); let regenStub: SinonStubbedInstance & QueuedStateRegenerator; + let forkchoiceStub: SinonStubbedInstance & 
ForkChoice; + let shufflingCacheStub: SinonStubbedInstance & ShufflingCache; let chain: IBeaconChain; beforeEach(() => { regenStub = sandbox.createStubInstance(QueuedStateRegenerator) as SinonStubbedInstance & QueuedStateRegenerator; + forkchoiceStub = sandbox.createStubInstance(ForkChoice) as SinonStubbedInstance & ForkChoice; + shufflingCacheStub = sandbox.createStubInstance(ShufflingCache) as SinonStubbedInstance & + ShufflingCache; chain = { config: config as BeaconConfig, regen: regenStub, + forkChoice: forkchoiceStub, + shufflingCache: shufflingCacheStub, } as Partial as IBeaconChain; }); - afterEach(() => { - sandbox.restore(); + const attEpoch = 1000; + const blockRoot = "0xd76aed834b4feef32efb53f9076e407c0d344cfdb70f0a770fa88416f70d304d"; + + it("block epoch is the same to attestation epoch", async () => { + const headSlot = computeStartSlotAtEpoch(attEpoch); + const attHeadBlock = { + slot: headSlot, + stateRoot: ZERO_HASH_HEX, + blockRoot, + } as Partial as ProtoBlock; + const previousDependentRoot = "0xa916b57729dbfb89a082820e0eb2b669d9d511a675d3d8c888b2f300f10b0bdf"; + forkchoiceStub.getDependentRoot.withArgs(attHeadBlock, EpochDifference.previous).returns(previousDependentRoot); + const expectedShuffling = {epoch: attEpoch} as EpochShuffling; + shufflingCacheStub.get.withArgs(attEpoch, previousDependentRoot).returns(expectedShuffling); + const resultShuffling = await getShufflingForAttestationVerification( + chain, + attEpoch, + attHeadBlock, + RegenCaller.validateGossipAttestation + ); + expect(resultShuffling).to.be.deep.equal(expectedShuffling); }); - const forkSlot = computeStartSlotAtEpoch(config.CAPELLA_FORK_EPOCH); - const getBlockSlotStateTestCases: {id: string; attSlot: Slot; headSlot: Slot; regenCall: keyof StateRegenerator}[] = [ - { - id: "should call regen.getBlockSlotState at fork boundary", - attSlot: forkSlot + 1, - headSlot: forkSlot - 1, - regenCall: "getBlockSlotState", - }, - { - id: "should call regen.getBlockSlotState if > 1 
epoch difference", - attSlot: forkSlot + 2 * SLOTS_PER_EPOCH, - headSlot: forkSlot + 1, - regenCall: "getBlockSlotState", - }, - { - id: "should call getState if 1 epoch difference", - attSlot: forkSlot + 2 * SLOTS_PER_EPOCH, - headSlot: forkSlot + SLOTS_PER_EPOCH, - regenCall: "getState", - }, - { - id: "should call getState if 0 epoch difference", - attSlot: forkSlot + 2 * SLOTS_PER_EPOCH, - headSlot: forkSlot + 2 * SLOTS_PER_EPOCH, - regenCall: "getState", - }, - ]; + it("block epoch is previous attestation epoch", async () => { + const headSlot = computeStartSlotAtEpoch(attEpoch - 1); + const attHeadBlock = { + slot: headSlot, + stateRoot: ZERO_HASH_HEX, + blockRoot, + } as Partial as ProtoBlock; + const currentDependentRoot = "0xa916b57729dbfb89a082820e0eb2b669d9d511a675d3d8c888b2f300f10b0bdf"; + forkchoiceStub.getDependentRoot.withArgs(attHeadBlock, EpochDifference.current).returns(currentDependentRoot); + const expectedShuffling = {epoch: attEpoch} as EpochShuffling; + shufflingCacheStub.get.withArgs(attEpoch, currentDependentRoot).returns(expectedShuffling); + const resultShuffling = await getShufflingForAttestationVerification( + chain, + attEpoch, + attHeadBlock, + RegenCaller.validateGossipAttestation + ); + expect(resultShuffling).to.be.deep.equal(expectedShuffling); + }); + + it("block epoch is attestation epoch - 2", async () => { + const headSlot = computeStartSlotAtEpoch(attEpoch - 2); + const attHeadBlock = { + slot: headSlot, + stateRoot: ZERO_HASH_HEX, + blockRoot, + } as Partial as ProtoBlock; + const expectedShuffling = {epoch: attEpoch} as EpochShuffling; + shufflingCacheStub.get.withArgs(attEpoch, blockRoot).onFirstCall().returns(null); + shufflingCacheStub.get.withArgs(attEpoch, blockRoot).onSecondCall().returns(expectedShuffling); + const resultShuffling = await getShufflingForAttestationVerification( + chain, + attEpoch, + attHeadBlock, + RegenCaller.validateGossipAttestation + ); + 
sandbox.assert.notCalled(forkchoiceStub.getDependentRoot); + expect(resultShuffling).to.be.deep.equal(expectedShuffling); + }); - for (const {id, attSlot, headSlot, regenCall} of getBlockSlotStateTestCases) { - it(id, async () => { - const attEpoch = computeEpochAtSlot(attSlot); - const attHeadBlock = { - slot: headSlot, - stateRoot: ZERO_HASH_HEX, - blockRoot: ZERO_HASH_HEX, - } as Partial as ProtoBlock; - expect(regenStub[regenCall].callCount).to.equal(0); - await getStateForAttestationVerification( + it("block epoch is attestation epoch + 1", async () => { + const headSlot = computeStartSlotAtEpoch(attEpoch + 1); + const attHeadBlock = { + slot: headSlot, + stateRoot: ZERO_HASH_HEX, + blockRoot, + } as Partial as ProtoBlock; + try { + await getShufflingForAttestationVerification( chain, - attSlot, attEpoch, attHeadBlock, RegenCaller.validateGossipAttestation ); - expect(regenStub[regenCall].callCount).to.equal(1); - }); - } + expect.fail("Expect error because attestation epoch is greater than block epoch"); + } catch (e) { + expect(e instanceof Error).to.be.true; + } + }); }); diff --git a/packages/beacon-node/test/unit/util/array.test.ts b/packages/beacon-node/test/unit/util/array.test.ts index 05262368e0d..f4ffd5c1303 100644 --- a/packages/beacon-node/test/unit/util/array.test.ts +++ b/packages/beacon-node/test/unit/util/array.test.ts @@ -103,6 +103,35 @@ describe("LinkedList", () => { expect(list.last()).to.be.equal(98); }); + describe("moveToHead", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + }); + + it("item is head", () => { + list.moveToHead(1); + expect(list.toArray()).to.be.deep.equal([1, 2, 3]); + expect(list.first()).to.be.equal(1); + }); + + it("item is middle", () => { + list.moveToHead(2); + expect(list.toArray()).to.be.deep.equal([2, 1, 3]); + expect(list.first()).to.be.equal(2); + }); + + it("item is tail", () => { + list.moveToHead(3); + 
expect(list.toArray()).to.be.deep.equal([3, 1, 2]); + expect(list.first()).to.be.equal(3); + }); + }); + it("values", () => { expect(Array.from(list.values())).to.be.deep.equal([]); const count = 100; diff --git a/packages/beacon-node/test/unit/util/sszBytes.test.ts b/packages/beacon-node/test/unit/util/sszBytes.test.ts index 58b39dda82b..3962835e0cf 100644 --- a/packages/beacon-node/test/unit/util/sszBytes.test.ts +++ b/packages/beacon-node/test/unit/util/sszBytes.test.ts @@ -1,6 +1,9 @@ import {expect} from "chai"; +import {config} from "@lodestar/config/default"; +import {createChainForkConfig} from "@lodestar/config"; import {deneb, Epoch, phase0, RootHex, Slot, ssz} from "@lodestar/types"; import {fromHex, toHex} from "@lodestar/utils"; +import {computeStartSlotAtEpoch} from "@lodestar/state-transition"; import { getAttDataBase64FromAttestationSerialized, getAttDataBase64FromSignedAggregateAndProofSerialized, @@ -12,7 +15,10 @@ import { getSignatureFromAttestationSerialized, getSlotFromSignedBeaconBlockSerialized, getSlotFromSignedBlobSidecarSerialized, + getValidatorsBytesFromStateBytes, + getWithdrawalCredentialFirstByteFromValidatorBytes, } from "../../../src/util/sszBytes.js"; +import {generateState} from "../../utils/state.js"; describe("attestation SSZ serialized picking", () => { const testCases: phase0.Attestation[] = [ @@ -166,6 +172,78 @@ describe("signedBlobSidecar SSZ serialized picking", () => { }); }); +describe("validators bytes utils", () => { + it("phase0", () => { + const state = generateState({slot: 100}, config); + expect(state.validators.length).to.be.equal(16); + for (let i = 0; i < state.validators.length; i++) { + state.validators.get(i).withdrawalCredentials = Buffer.alloc(32, i % 2); + } + state.commit(); + const validatorsBytes = state.validators.serialize(); + const stateBytes = state.serialize(); + expect(getValidatorsBytesFromStateBytes(config, stateBytes)).to.be.deep.equal(validatorsBytes); + for (let i = 0; i < 
state.validators.length; i++) { + expect(getWithdrawalCredentialFirstByteFromValidatorBytes(validatorsBytes, i)).to.be.equal(i % 2); + } + }); + + it("altair", () => { + // eslint-disable-next-line @typescript-eslint/naming-convention + const altairConfig = createChainForkConfig({...config, ALTAIR_FORK_EPOCH: 100}); + const state = generateState({slot: computeStartSlotAtEpoch(altairConfig.ALTAIR_FORK_EPOCH) + 100}, altairConfig); + expect(state.validators.length).to.be.equal(16); + for (let i = 0; i < state.validators.length; i++) { + state.validators.get(i).withdrawalCredentials = Buffer.alloc(32, i % 2); + } + state.commit(); + const validatorsBytes = state.validators.serialize(); + const stateBytes = state.serialize(); + expect(getValidatorsBytesFromStateBytes(altairConfig, stateBytes)).to.be.deep.equal(validatorsBytes); + for (let i = 0; i < state.validators.length; i++) { + expect(getWithdrawalCredentialFirstByteFromValidatorBytes(validatorsBytes, i)).to.be.equal(i % 2); + } + }); + + it("bellatrix", () => { + // eslint-disable-next-line @typescript-eslint/naming-convention + const bellatrixConfig = createChainForkConfig({...config, BELLATRIX_FORK_EPOCH: 100}); + const state = generateState( + {slot: computeStartSlotAtEpoch(bellatrixConfig.BELLATRIX_FORK_EPOCH) + 100}, + bellatrixConfig + ); + expect(state.validators.length).to.be.equal(16); + for (let i = 0; i < state.validators.length; i++) { + state.validators.get(i).withdrawalCredentials = Buffer.alloc(32, i % 2); + } + state.commit(); + const validatorsBytes = state.validators.serialize(); + const stateBytes = state.serialize(); + expect(getValidatorsBytesFromStateBytes(bellatrixConfig, stateBytes)).to.be.deep.equal(validatorsBytes); + for (let i = 0; i < state.validators.length; i++) { + expect(getWithdrawalCredentialFirstByteFromValidatorBytes(validatorsBytes, i)).to.be.equal(i % 2); + } + }); + + // TODO: figure out the "undefined or null" error in the test below + it.skip("capella", () => { + // 
eslint-disable-next-line @typescript-eslint/naming-convention + const capellaConfig = createChainForkConfig({...config, CAPELLA_FORK_EPOCH: 100}); + const state = generateState({slot: computeStartSlotAtEpoch(capellaConfig.CAPELLA_FORK_EPOCH) + 100}, capellaConfig); + expect(state.validators.length).to.be.equal(16); + for (let i = 0; i < state.validators.length; i++) { + state.validators.get(i).withdrawalCredentials = Buffer.alloc(32, i % 2); + } + state.commit(); + const validatorsBytes = state.validators.serialize(); + const stateBytes = state.serialize(); + expect(getValidatorsBytesFromStateBytes(capellaConfig, stateBytes)).to.be.deep.equal(validatorsBytes); + for (let i = 0; i < state.validators.length; i++) { + expect(getWithdrawalCredentialFirstByteFromValidatorBytes(validatorsBytes, i)).to.be.equal(i % 2); + } + }); +}); + function attestationFromValues( slot: Slot, blockRoot: RootHex, diff --git a/packages/beacon-node/test/utils/mocks/db.ts b/packages/beacon-node/test/utils/mocks/db.ts index 731091bc8e6..16d7b32a1bc 100644 --- a/packages/beacon-node/test/utils/mocks/db.ts +++ b/packages/beacon-node/test/utils/mocks/db.ts @@ -1,4 +1,5 @@ import {IBeaconDb} from "../../../src/db/index.js"; +import {CheckpointStateRepository} from "../../../src/db/repositories/checkpointState.js"; import { AttesterSlashingRepository, BlockArchiveRepository, @@ -38,6 +39,7 @@ export function getStubbedBeaconDb(): IBeaconDb { // finalized states stateArchive: createStubInstance(StateArchiveRepository), + checkpointState: createStubInstance(CheckpointStateRepository), // op pool voluntaryExit: createStubInstance(VoluntaryExitRepository), diff --git a/packages/beacon-node/test/utils/network.ts b/packages/beacon-node/test/utils/network.ts index 02e8c66879f..44c1f92270f 100644 --- a/packages/beacon-node/test/utils/network.ts +++ b/packages/beacon-node/test/utils/network.ts @@ -83,6 +83,8 @@ export async function getNetworkForTest( disableLightClientServerOnImportBlockHead: true, 
disablePrepareNextSlot: true, minSameMessageSignatureSetsToBatch: 32, + maxStates: 32, + maxEpochsInMemory: 2, }, { config: beaconConfig, diff --git a/packages/beacon-node/test/utils/persistent.ts b/packages/beacon-node/test/utils/persistent.ts new file mode 100644 index 00000000000..074d399cea5 --- /dev/null +++ b/packages/beacon-node/test/utils/persistent.ts @@ -0,0 +1,23 @@ +import {CPStatePersistentApis} from "../../src/chain/stateCache/persistent/types.js"; + +export function getTestPersistentApi(fileApisBuffer: Map): CPStatePersistentApis { + const persistentApis: CPStatePersistentApis = { + init: () => Promise.resolve(), + write: (cpKey, state) => { + if (!fileApisBuffer.has(cpKey)) { + fileApisBuffer.set(cpKey, state.serialize()); + } + return Promise.resolve(cpKey); + }, + remove: (filePath) => { + if (fileApisBuffer.has(filePath)) { + fileApisBuffer.delete(filePath); + return Promise.resolve(true); + } + return Promise.resolve(false); + }, + read: (filePath) => Promise.resolve(fileApisBuffer.get(filePath) || Buffer.alloc(0)), + }; + + return persistentApis; +} diff --git a/packages/beacon-node/test/utils/validationData/attestation.ts b/packages/beacon-node/test/utils/validationData/attestation.ts index 6f768227e5c..fa3c4d479ad 100644 --- a/packages/beacon-node/test/utils/validationData/attestation.ts +++ b/packages/beacon-node/test/utils/validationData/attestation.ts @@ -1,10 +1,13 @@ import {BitArray, toHexString} from "@chainsafe/ssz"; -import {computeEpochAtSlot, computeSigningRoot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; +import { + computeEpochAtSlot, + computeSigningRoot, + computeStartSlotAtEpoch, + getShufflingDecisionBlock, +} from "@lodestar/state-transition"; import {ProtoBlock, IForkChoice, ExecutionStatus} from "@lodestar/fork-choice"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {phase0, Slot, ssz} from "@lodestar/types"; -import {config} from "@lodestar/config/default"; -import {BeaconConfig} from 
"@lodestar/config"; import { generateTestCachedBeaconStateOnlyValidators, getSecretKeyFromIndexCached, @@ -21,6 +24,7 @@ import {SeenAggregatedAttestations} from "../../../src/chain/seenCache/seenAggre import {SeenAttestationDatas} from "../../../src/chain/seenCache/seenAttestationData.js"; import {defaultChainOptions} from "../../../src/chain/options.js"; import {testLogger} from "../logger.js"; +import {ShufflingCache} from "../../../src/chain/shufflingCache.js"; export type AttestationValidDataOpts = { currentSlot?: Slot; @@ -73,6 +77,12 @@ export function getAttestationValidData(opts: AttestationValidDataOpts): { ...{executionPayloadBlockHash: null, executionStatus: ExecutionStatus.PreMerge}, }; + + const shufflingCache = new ShufflingCache(); + shufflingCache.processState(state, state.epochCtx.currentShuffling.epoch); + shufflingCache.processState(state, state.epochCtx.nextShuffling.epoch); + const dependentRoot = getShufflingDecisionBlock(state, state.epochCtx.currentShuffling.epoch); + const forkChoice = { getBlock: (root) => { if (!ssz.Root.equals(root, beaconBlockRoot)) return null; @@ -82,6 +92,7 @@ export function getAttestationValidData(opts: AttestationValidDataOpts): { if (rootHex !== toHexString(beaconBlockRoot)) return null; return headBlock; }, + getDependentRoot: () => dependentRoot, } as Partial as IForkChoice; const committeeIndices = state.epochCtx.getBeaconCommittee(attSlot, attIndex); @@ -117,11 +128,13 @@ export function getAttestationValidData(opts: AttestationValidDataOpts): { // Add state to regen const regen = { getState: async () => state, + // TODO: remove this once we have a better way to get state + getStateSync: () => state, } as Partial as IStateRegenerator; const chain = { clock, - config: config as BeaconConfig, + config: state.config, forkChoice, regen, seenAttesters: new SeenAttesters(), @@ -132,6 +145,7 @@ export function getAttestationValidData(opts: AttestationValidDataOpts): { : new BlsMultiThreadWorkerPool({}, {logger: 
testLogger(), metrics: null}), waitForBlock: () => Promise.resolve(false), index2pubkey: state.epochCtx.index2pubkey, + shufflingCache, opts: defaultChainOptions, } as Partial as IBeaconChain; diff --git a/packages/cli/package.json b/packages/cli/package.json index a4a387fc670..53e17fb0cee 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -59,7 +59,7 @@ "@chainsafe/bls-keystore": "^2.0.0", "@chainsafe/blst": "^0.2.9", "@chainsafe/discv5": "^5.1.0", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@chainsafe/threads": "^1.11.1", "@libp2p/crypto": "^2.0.2", "@libp2p/peer-id": "^3.0.1", diff --git a/packages/cli/src/options/beaconNodeOptions/chain.ts b/packages/cli/src/options/beaconNodeOptions/chain.ts index 359b77740b0..1c13d97236c 100644 --- a/packages/cli/src/options/beaconNodeOptions/chain.ts +++ b/packages/cli/src/options/beaconNodeOptions/chain.ts @@ -24,6 +24,10 @@ export type ChainArgs = { emitPayloadAttributes?: boolean; broadcastValidationStrictness?: string; "chain.minSameMessageSignatureSetsToBatch"?: number; + "chain.nHistoricalStates"?: boolean; + "chain.persistCheckpointStatesToFile"?: boolean; + "chain.maxStates"?: number; + "chain.maxEpochsInMemory"?: number; }; export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { @@ -49,6 +53,11 @@ export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { broadcastValidationStrictness: args["broadcastValidationStrictness"], minSameMessageSignatureSetsToBatch: args["chain.minSameMessageSignatureSetsToBatch"] ?? defaultOptions.chain.minSameMessageSignatureSetsToBatch, + nHistoricalStates: args["chain.nHistoricalStates"] ?? defaultOptions.chain.nHistoricalStates, + persistCheckpointStatesToFile: + args["chain.persistCheckpointStatesToFile"] ?? defaultOptions.chain.persistCheckpointStatesToFile, + maxStates: args["chain.maxStates"] ?? defaultOptions.chain.maxStates, + maxEpochsInMemory: args["chain.maxEpochsInMemory"] ?? 
defaultOptions.chain.maxEpochsInMemory, }; } @@ -193,4 +202,36 @@ Will double processing times. Use only for debugging purposes.", default: defaultOptions.chain.minSameMessageSignatureSetsToBatch, group: "chain", }, + + "chain.nHistoricalStates": { + hidden: true, + description: "Use persistent checkpoint state cache or not", + type: "number", + default: defaultOptions.chain.nHistoricalStates, + group: "chain", + }, + + "chain.persistCheckpointStatesToFile": { + hidden: true, + description: "Persist checkpoint states to file or not", + type: "number", + default: defaultOptions.chain.persistCheckpointStatesToFile, + group: "chain", + }, + + "chain.maxStates": { + hidden: true, + description: "Max states to cache in memory", + type: "number", + default: defaultOptions.chain.maxStates, + group: "chain", + }, + + "chain.maxEpochsInMemory": { + hidden: true, + description: "Max epochs to cache checkpoint states in memory", + type: "number", + default: defaultOptions.chain.maxEpochsInMemory, + group: "chain", + }, }; diff --git a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index b0f0254443d..124475f993a 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -34,6 +34,10 @@ describe("options / beaconNodeOptions", () => { "chain.archiveStateEpochFrequency": 1024, "chain.trustedSetup": "", "chain.minSameMessageSignatureSetsToBatch": 32, + "chain.nHistoricalStates": true, + "chain.persistCheckpointStatesToFile": true, + "chain.maxStates": 32, + "chain.maxEpochsInMemory": 2, emitPayloadAttributes: false, eth1: true, @@ -135,6 +139,10 @@ describe("options / beaconNodeOptions", () => { emitPayloadAttributes: false, trustedSetup: "", minSameMessageSignatureSetsToBatch: 32, + nHistoricalStates: true, + persistCheckpointStatesToFile: true, + maxStates: 32, + maxEpochsInMemory: 2, }, eth1: { enabled: true, diff --git 
a/packages/config/package.json b/packages/config/package.json index 8fd9bd835b5..bf2aeea6e0c 100644 --- a/packages/config/package.json +++ b/packages/config/package.json @@ -64,7 +64,7 @@ "blockchain" ], "dependencies": { - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/params": "^1.11.1", "@lodestar/types": "^1.11.1" } diff --git a/packages/db/package.json b/packages/db/package.json index f9227668e82..cb13d1ce956 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -37,7 +37,7 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/config": "^1.11.1", "@lodestar/utils": "^1.11.1", "@types/levelup": "^4.3.3", diff --git a/packages/fork-choice/package.json b/packages/fork-choice/package.json index 0e8de6bf52f..33f7832e755 100644 --- a/packages/fork-choice/package.json +++ b/packages/fork-choice/package.json @@ -38,7 +38,7 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/config": "^1.11.1", "@lodestar/params": "^1.11.1", "@lodestar/state-transition": "^1.11.1", diff --git a/packages/light-client/package.json b/packages/light-client/package.json index 95bca9e36b2..d7464b80085 100644 --- a/packages/light-client/package.json +++ b/packages/light-client/package.json @@ -66,7 +66,7 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/persistent-merkle-tree": "^0.5.0", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/api": "^1.11.1", "@lodestar/config": "^1.11.1", "@lodestar/params": "^1.11.1", diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index 133e149188b..7b1791f6784 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -61,16 +61,16 @@ "@chainsafe/bls": "7.1.1", "@chainsafe/persistent-merkle-tree": "^0.5.0", 
"@chainsafe/persistent-ts": "^0.19.1", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/config": "^1.11.1", "@lodestar/params": "^1.11.1", "@lodestar/types": "^1.11.1", "@lodestar/utils": "^1.11.1", "bigint-buffer": "^1.1.5", + "@chainsafe/blst": "^0.2.9", "buffer-xor": "^2.0.2" }, "devDependencies": { - "@chainsafe/blst": "^0.2.9", "@types/buffer-xor": "^2.0.0", "@types/mockery": "^1.4.30", "mockery": "^2.1.0" diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index aeefc4769ac..581a0e49303 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -25,12 +25,12 @@ import { getSeed, computeProposers, } from "../util/index.js"; -import {computeEpochShuffling, EpochShuffling} from "../util/epochShuffling.js"; +import {computeEpochShuffling, EpochShuffling, getShufflingDecisionBlock} from "../util/epochShuffling.js"; import {computeBaseRewardPerIncrement, computeSyncParticipantReward} from "../util/syncCommittee.js"; import {sumTargetUnslashedBalanceIncrements} from "../util/targetUnslashedBalance.js"; import {EffectiveBalanceIncrements, getEffectiveBalanceIncrementsWithLen} from "./effectiveBalanceIncrements.js"; import {Index2PubkeyCache, PubkeyIndexMap, syncPubkeys} from "./pubkeyCache.js"; -import {BeaconStateAllForks, BeaconStateAltair} from "./types.js"; +import {BeaconStateAllForks, BeaconStateAltair, ShufflingGetter} from "./types.js"; import { computeSyncCommitteeCache, getSyncCommitteeCache, @@ -50,6 +50,7 @@ export type EpochCacheImmutableData = { export type EpochCacheOpts = { skipSyncCommitteeCache?: boolean; skipSyncPubkeys?: boolean; + shufflingGetter?: ShufflingGetter; }; /** Defers computing proposers by persisting only the seed, and dropping it once indexes are computed */ @@ -272,21 +273,28 @@ export class EpochCache { const currentActiveIndices: ValidatorIndex[] = []; const nextActiveIndices: 
ValidatorIndex[] = []; + const previousShufflingDecisionBlock = getShufflingDecisionBlock(state, previousEpoch); + const previousShufflingIn = opts?.shufflingGetter?.(previousEpoch, previousShufflingDecisionBlock); + const currentShufflingDecisionBlock = getShufflingDecisionBlock(state, currentEpoch); + const currentShufflingIn = opts?.shufflingGetter?.(currentEpoch, currentShufflingDecisionBlock); + const nextShufflingDecisionBlock = getShufflingDecisionBlock(state, nextEpoch); + const nextShufflingIn = opts?.shufflingGetter?.(nextEpoch, nextShufflingDecisionBlock); + for (let i = 0; i < validatorCount; i++) { const validator = validators[i]; // Note: Not usable for fork-choice balances since in-active validators are not zero'ed effectiveBalanceIncrements[i] = Math.floor(validator.effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); - if (isActiveValidator(validator, previousEpoch)) { + if (previousShufflingIn === undefined && isActiveValidator(validator, previousEpoch)) { previousActiveIndices.push(i); } - if (isActiveValidator(validator, currentEpoch)) { + if (currentShufflingIn === undefined && isActiveValidator(validator, currentEpoch)) { currentActiveIndices.push(i); // We track totalActiveBalanceIncrements as ETH to fit total network balance in a JS number (53 bits) totalActiveBalanceIncrements += effectiveBalanceIncrements[i]; } - if (isActiveValidator(validator, nextEpoch)) { + if (nextShufflingIn === undefined && isActiveValidator(validator, nextEpoch)) { nextActiveIndices.push(i); } @@ -309,11 +317,11 @@ export class EpochCache { throw Error("totalActiveBalanceIncrements >= Number.MAX_SAFE_INTEGER. MAX_EFFECTIVE_BALANCE is too low."); } - const currentShuffling = computeEpochShuffling(state, currentActiveIndices, currentEpoch); - const previousShuffling = isGenesis - ? 
currentShuffling - : computeEpochShuffling(state, previousActiveIndices, previousEpoch); - const nextShuffling = computeEpochShuffling(state, nextActiveIndices, nextEpoch); + const currentShuffling = currentShufflingIn ?? computeEpochShuffling(state, currentActiveIndices, currentEpoch); + const previousShuffling = + previousShufflingIn ?? + (isGenesis ? currentShuffling : computeEpochShuffling(state, previousActiveIndices, previousEpoch)); + const nextShuffling = nextShufflingIn ?? computeEpochShuffling(state, nextActiveIndices, nextEpoch); const currentProposerSeed = getSeed(state, currentEpoch, DOMAIN_BEACON_PROPOSER); diff --git a/packages/state-transition/src/cache/stateCache.ts b/packages/state-transition/src/cache/stateCache.ts index f8ce97d5ffb..37aa95723a5 100644 --- a/packages/state-transition/src/cache/stateCache.ts +++ b/packages/state-transition/src/cache/stateCache.ts @@ -1,4 +1,7 @@ +import bls from "@chainsafe/bls"; +import {CoordType} from "@chainsafe/blst"; import {BeaconConfig} from "@lodestar/config"; +import {loadState} from "../util/loadState.js"; import {EpochCache, EpochCacheImmutableData, EpochCacheOpts} from "./epochCache.js"; import { BeaconStateAllForks, @@ -137,13 +140,48 @@ export function createCachedBeaconState( immutableData: EpochCacheImmutableData, opts?: EpochCacheOpts ): T & BeaconStateCache { - return getCachedBeaconState(state, { + const epochCache = EpochCache.createFromState(state, immutableData, opts); + const cachedState = getCachedBeaconState(state, { config: immutableData.config, - epochCtx: EpochCache.createFromState(state, immutableData, opts), + epochCtx: epochCache, clonedCount: 0, clonedCountWithTransferCache: 0, createdWithTransferCache: false, }); + + return cachedState; +} + +/** + * Create a CachedBeaconState given a cached seed state and state bytes + * This guarantees that the returned state shares the same tree with the seed state + * Check loadState() api for more details + */ +export function 
loadCachedBeaconState( + cachedSeedState: T, + stateBytes: Uint8Array, + opts?: EpochCacheOpts +): T { + const {state: migratedState, modifiedValidators} = loadState(cachedSeedState.config, cachedSeedState, stateBytes); + const {pubkey2index, index2pubkey} = cachedSeedState.epochCtx; + // Get the validators sub tree once for all the loop + const validators = migratedState.validators; + for (const validatorIndex of modifiedValidators) { + const validator = validators.getReadonly(validatorIndex); + const pubkey = validator.pubkey; + pubkey2index.set(pubkey, validatorIndex); + index2pubkey[validatorIndex] = bls.PublicKey.fromBytes(pubkey, CoordType.jacobian); + } + + return createCachedBeaconState( + migratedState, + { + config: cachedSeedState.config, + pubkey2index, + index2pubkey, + }, + {...(opts ?? {}), ...{skipSyncPubkeys: true}} + ) as T; } /** diff --git a/packages/state-transition/src/cache/types.ts b/packages/state-transition/src/cache/types.ts index 9d0115cee78..39b1dbb4b45 100644 --- a/packages/state-transition/src/cache/types.ts +++ b/packages/state-transition/src/cache/types.ts @@ -1,5 +1,6 @@ import {CompositeViewDU} from "@chainsafe/ssz"; -import {ssz} from "@lodestar/types"; +import {Epoch, RootHex, ssz} from "@lodestar/types"; +import {EpochShuffling} from "../util/epochShuffling.js"; export type BeaconStatePhase0 = CompositeViewDU; export type BeaconStateAltair = CompositeViewDU; @@ -20,3 +21,5 @@ export type BeaconStateAllForks = | BeaconStateDeneb; export type BeaconStateExecutions = BeaconStateBellatrix | BeaconStateCapella | BeaconStateDeneb; + +export type ShufflingGetter = (shufflingEpoch: Epoch, dependentRoot: RootHex) => EpochShuffling | null; diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index 8c9a296ebd9..ee45b0c0938 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -25,7 +25,8 @@ export { // Main state caches export { createCachedBeaconState, 
- BeaconStateCache, + loadCachedBeaconState, + type BeaconStateCache, isCachedBeaconState, isStateBalancesNodesPopulated, isStateValidatorsNodesPopulated, diff --git a/packages/state-transition/src/util/epochShuffling.ts b/packages/state-transition/src/util/epochShuffling.ts index 37ac6ba0c8d..efb02e759bd 100644 --- a/packages/state-transition/src/util/epochShuffling.ts +++ b/packages/state-transition/src/util/epochShuffling.ts @@ -1,6 +1,8 @@ -import {Epoch, ValidatorIndex} from "@lodestar/types"; +import {toHexString} from "@chainsafe/ssz"; +import {Epoch, RootHex, ValidatorIndex} from "@lodestar/types"; import {intDiv} from "@lodestar/utils"; import { + ATTESTATION_SUBNET_COUNT, DOMAIN_BEACON_ATTESTER, MAX_COMMITTEES_PER_SLOT, SLOTS_PER_EPOCH, @@ -9,6 +11,8 @@ import { import {BeaconStateAllForks} from "../types.js"; import {getSeed} from "./seed.js"; import {unshuffleList} from "./shuffle.js"; +import {computeStartSlotAtEpoch} from "./epoch.js"; +import {getBlockRootAtSlot} from "./blockRoot.js"; /** * Readonly interface for EpochShuffling. 
@@ -95,3 +99,8 @@ export function computeEpochShuffling( committeesPerSlot, }; } + +export function getShufflingDecisionBlock(state: BeaconStateAllForks, epoch: Epoch): RootHex { + const pivotSlot = computeStartSlotAtEpoch(epoch - 1) - 1; + return toHexString(getBlockRootAtSlot(state, pivotSlot)); +} diff --git a/packages/state-transition/src/util/index.ts b/packages/state-transition/src/util/index.ts index bbc9bf8a865..5b990cf0843 100644 --- a/packages/state-transition/src/util/index.ts +++ b/packages/state-transition/src/util/index.ts @@ -24,3 +24,4 @@ export * from "./slot.js"; export * from "./syncCommittee.js"; export * from "./validator.js"; export * from "./weakSubjectivity.js"; +export * from "./loadState.js"; diff --git a/packages/state-transition/src/util/loadState.ts b/packages/state-transition/src/util/loadState.ts new file mode 100644 index 00000000000..18eac53e6f3 --- /dev/null +++ b/packages/state-transition/src/util/loadState.ts @@ -0,0 +1,251 @@ +import {CompositeTypeAny, Type} from "@chainsafe/ssz"; +import {ssz} from "@lodestar/types"; +import {ForkSeq} from "@lodestar/params"; +import {ChainForkConfig} from "@lodestar/config"; +import {BeaconStateAllForks, BeaconStateAltair, BeaconStatePhase0} from "../types.js"; +import {VALIDATOR_BYTES_SIZE, getForkFromStateBytes, getStateTypeFromBytes} from "./sszBytes.js"; + +type BeaconStateType = + | typeof ssz.phase0.BeaconState + | typeof ssz.altair.BeaconState + | typeof ssz.bellatrix.BeaconState + | typeof ssz.capella.BeaconState + | typeof ssz.deneb.BeaconState; + +type MigrateStateOutput = {state: BeaconStateAllForks; modifiedValidators: number[]}; + +/** + * Load state from bytes given a seed state so that we share the same base tree. This gives some benefits: + * - Have single base tree across the application + * - Faster to load state + * - Less memory usage + * - Utilize the cached HashObjects in seed state since a lot of validators are not changed, also the inactivity scores. 
+ * @returns the new state and modified validators + */ +export function loadState( + config: ChainForkConfig, + seedState: BeaconStateAllForks, + stateBytes: Uint8Array +): MigrateStateOutput { + const stateType = getStateTypeFromBytes(config, stateBytes) as BeaconStateType; + const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); + const fieldRanges = stateType.getFieldRanges(dataView, 0, stateBytes.length); + const allFields = Object.keys(stateType.fields); + const validatorsFieldIndex = allFields.indexOf("validators"); + const migratedState = stateType.defaultViewDU(); + // validators is rarely changed + const validatorsRange = fieldRanges[validatorsFieldIndex]; + const modifiedValidators = loadValidators( + migratedState, + seedState, + stateBytes.subarray(validatorsRange.start, validatorsRange.end) + ); + // inactivityScores + // this takes ~500ms to hashTreeRoot while this field is rarely changed + const fork = getForkFromStateBytes(config, stateBytes); + const seedFork = config.getForkSeq(seedState.slot); + + let loadedInactivityScores = false; + if (fork >= ForkSeq.altair && seedFork >= ForkSeq.altair) { + loadedInactivityScores = true; + const inactivityScoresIndex = allFields.indexOf("inactivityScores"); + const inactivityScoresRange = fieldRanges[inactivityScoresIndex]; + loadInactivityScores( + migratedState as BeaconStateAltair, + seedState as BeaconStateAltair, + stateBytes.subarray(inactivityScoresRange.start, inactivityScoresRange.end) + ); + } + for (const [fieldName, typeUnknown] of Object.entries(stateType.fields)) { + // loaded above + if (fieldName === "validators" || (loadedInactivityScores && fieldName === "inactivityScores")) { + continue; + } + const field = fieldName as Exclude; + const type = typeUnknown as Type; + const fieldIndex = allFields.indexOf(field); + const fieldRange = fieldRanges[fieldIndex]; + if (type.isBasic) { + (migratedState as BeaconStatePhase0)[field] = type.deserialize( + 
stateBytes.subarray(fieldRange.start, fieldRange.end) + ) as never; + } else { + (migratedState as BeaconStatePhase0)[field] = (type as CompositeTypeAny).deserializeToViewDU( + stateBytes.subarray(fieldRange.start, fieldRange.end) + ) as never; + } + } + migratedState.commit(); + + return {state: migratedState, modifiedValidators}; +} + +// the state stores inactivity scores of the old seed state, we need to update them +// this value rarely changes even after 3 months of data as monitored on mainnet in Sep 2023 +function loadInactivityScores( + migratedState: BeaconStateAltair, + seedState: BeaconStateAltair, + inactivityScoresBytes: Uint8Array +): void { + // migratedState starts with the same inactivityScores as the seed state + migratedState.inactivityScores = seedState.inactivityScores.clone(); + const oldValidator = migratedState.inactivityScores.length; + // UintNum64 = 8 bytes + const newValidator = inactivityScoresBytes.length / 8; + const minValidator = Math.min(oldValidator, newValidator); + const oldInactivityScores = migratedState.inactivityScores.serialize(); + const isMoreValidator = newValidator >= oldValidator; + const modifiedValidators: number[] = []; + findModifiedInactivityScores( + isMoreValidator ? oldInactivityScores : oldInactivityScores.subarray(0, minValidator * 8), + isMoreValidator ? 
inactivityScoresBytes.subarray(0, minValidator * 8) : inactivityScoresBytes, + modifiedValidators + ); + + for (const validatorIndex of modifiedValidators) { + migratedState.inactivityScores.set( + validatorIndex, + ssz.UintNum64.deserialize(inactivityScoresBytes.subarray(validatorIndex * 8, (validatorIndex + 1) * 8)) + ); + } + + if (isMoreValidator) { + // add new inactivityScores + for (let validatorIndex = oldValidator; validatorIndex < newValidator; validatorIndex++) { + migratedState.inactivityScores.push( + ssz.UintNum64.deserialize(inactivityScoresBytes.subarray(validatorIndex * 8, (validatorIndex + 1) * 8)) + ); + } + } else { + if (newValidator - 1 < 0) { + migratedState.inactivityScores = ssz.altair.InactivityScores.defaultViewDU(); + } else { + migratedState.inactivityScores = migratedState.inactivityScores.sliceTo(newValidator - 1); + } + } +} + +function loadValidators( + migratedState: BeaconStateAllForks, + seedState: BeaconStateAllForks, + newValidatorsBytes: Uint8Array +): number[] { + const seedValidatorCount = seedState.validators.length; + const newValidatorCount = Math.floor(newValidatorsBytes.length / VALIDATOR_BYTES_SIZE); + const isMoreValidator = newValidatorCount >= seedValidatorCount; + const minValidatorCount = Math.min(seedValidatorCount, newValidatorCount); + // migrated state starts with the same validators to seed state + migratedState.validators = seedState.validators.clone(); + const seedValidatorsBytes = seedState.validators.serialize(); + const modifiedValidators: number[] = []; + findModifiedValidators( + isMoreValidator ? seedValidatorsBytes : seedValidatorsBytes.subarray(0, minValidatorCount * VALIDATOR_BYTES_SIZE), + isMoreValidator ? 
newValidatorsBytes.subarray(0, minValidatorCount * VALIDATOR_BYTES_SIZE) : newValidatorsBytes, + modifiedValidators + ); + for (const i of modifiedValidators) { + migratedState.validators.set( + i, + ssz.phase0.Validator.deserializeToViewDU( + newValidatorsBytes.subarray(i * VALIDATOR_BYTES_SIZE, (i + 1) * VALIDATOR_BYTES_SIZE) + ) + ); + } + + if (newValidatorCount >= seedValidatorCount) { + // add new validators + for (let validatorIndex = seedValidatorCount; validatorIndex < newValidatorCount; validatorIndex++) { + migratedState.validators.push( + ssz.phase0.Validator.deserializeToViewDU( + newValidatorsBytes.subarray( + validatorIndex * VALIDATOR_BYTES_SIZE, + (validatorIndex + 1) * VALIDATOR_BYTES_SIZE + ) + ) + ); + modifiedValidators.push(validatorIndex); + } + } else { + migratedState.validators = migratedState.validators.sliceTo(newValidatorCount - 1); + } + return modifiedValidators; +} + +function findModifiedValidators( + validatorsBytes: Uint8Array, + validatorsBytes2: Uint8Array, + modifiedValidators: number[], + validatorOffset = 0 +): void { + if (validatorsBytes.length !== validatorsBytes2.length) { + throw new Error( + "validatorsBytes.length !== validatorsBytes2.length " + validatorsBytes.length + " vs " + validatorsBytes2.length + ); + } + + if (Buffer.compare(validatorsBytes, validatorsBytes2) === 0) { + return; + } + + if (validatorsBytes.length === VALIDATOR_BYTES_SIZE) { + modifiedValidators.push(validatorOffset); + return; + } + + const numValidator = Math.floor(validatorsBytes.length / VALIDATOR_BYTES_SIZE); + const halfValidator = Math.floor(numValidator / 2); + findModifiedValidators( + validatorsBytes.subarray(0, halfValidator * VALIDATOR_BYTES_SIZE), + validatorsBytes2.subarray(0, halfValidator * VALIDATOR_BYTES_SIZE), + modifiedValidators, + validatorOffset + ); + findModifiedValidators( + validatorsBytes.subarray(halfValidator * VALIDATOR_BYTES_SIZE), + validatorsBytes2.subarray(halfValidator * VALIDATOR_BYTES_SIZE), + 
modifiedValidators, + validatorOffset + halfValidator + ); +} + +// as monitored on mainnet, inactivityScores are not changed much and they are mostly 0 +function findModifiedInactivityScores( + inactivityScoresBytes: Uint8Array, + inactivityScoresBytes2: Uint8Array, + modifiedValidators: number[], + validatorOffset = 0 +): void { + if (inactivityScoresBytes.length !== inactivityScoresBytes2.length) { + throw new Error( + "inactivityScoresBytes.length !== inactivityScoresBytes2.length " + + inactivityScoresBytes.length + + " vs " + + inactivityScoresBytes2.length + ); + } + + if (Buffer.compare(inactivityScoresBytes, inactivityScoresBytes2) === 0) { + return; + } + + // UintNum64 = 8 bytes + if (inactivityScoresBytes.length === 8) { + modifiedValidators.push(validatorOffset); + return; + } + + const numValidator = Math.floor(inactivityScoresBytes.length / 8); + const halfValidator = Math.floor(numValidator / 2); + findModifiedInactivityScores( + inactivityScoresBytes.subarray(0, halfValidator * 8), + inactivityScoresBytes2.subarray(0, halfValidator * 8), + modifiedValidators, + validatorOffset + ); + findModifiedInactivityScores( + inactivityScoresBytes.subarray(halfValidator * 8), + inactivityScoresBytes2.subarray(halfValidator * 8), + modifiedValidators, + validatorOffset + halfValidator + ); +} diff --git a/packages/state-transition/src/util/sszBytes.ts b/packages/state-transition/src/util/sszBytes.ts new file mode 100644 index 00000000000..25b65626a0d --- /dev/null +++ b/packages/state-transition/src/util/sszBytes.ts @@ -0,0 +1,55 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {ForkSeq} from "@lodestar/params"; +import {Slot, allForks} from "@lodestar/types"; +import {bytesToInt} from "@lodestar/utils"; + +/** + * Slot uint64 + */ +const SLOT_BYTE_COUNT = 8; + +/** + * 48 + 32 + 8 + 1 + 8 + 8 + 8 + 8 = 121 + * ``` + * class Validator(Container): + pubkey: BLSPubkey [fixed - 48 bytes] + withdrawal_credentials: Bytes32 [fixed - 32 bytes] + 
effective_balance: Gwei [fixed - 8 bytes] + slashed: boolean [fixed - 1 byte] + # Status epochs + activation_eligibility_epoch: Epoch [fixed - 8 bytes] + activation_epoch: Epoch [fixed - 8 bytes] + exit_epoch: Epoch [fixed - 8 bytes] + withdrawable_epoch: Epoch [fixed - 8 bytes] + ``` + */ +export const VALIDATOR_BYTES_SIZE = 121; + +/** + * 8 + 32 = 40 + * ``` + * class BeaconState(Container): + * genesis_time: uint64 [fixed - 8 bytes] + * genesis_validators_root: Root [fixed - 32 bytes] + * slot: Slot [fixed - 8 bytes] + * ... + * ``` + */ +const SLOT_BYTES_POSITION_IN_STATE = 40; + +export function getForkFromStateBytes(config: ChainForkConfig, bytes: Buffer | Uint8Array): ForkSeq { + const slot = bytesToInt(bytes.subarray(SLOT_BYTES_POSITION_IN_STATE, SLOT_BYTES_POSITION_IN_STATE + SLOT_BYTE_COUNT)); + return config.getForkSeq(slot); +} + +export function getStateTypeFromBytes( + config: ChainForkConfig, + bytes: Buffer | Uint8Array +): allForks.AllForksSSZTypes["BeaconState"] { + const slot = getStateSlotFromBytes(bytes); + return config.getForkTypes(slot).BeaconState; +} + +export function getStateSlotFromBytes(bytes: Uint8Array): Slot { + return bytesToInt(bytes.subarray(SLOT_BYTES_POSITION_IN_STATE, SLOT_BYTES_POSITION_IN_STATE + SLOT_BYTE_COUNT)); +} diff --git a/packages/state-transition/test/perf/util/loadState.test.ts b/packages/state-transition/test/perf/util/loadState.test.ts new file mode 100644 index 00000000000..8694e1ddf1b --- /dev/null +++ b/packages/state-transition/test/perf/util/loadState.test.ts @@ -0,0 +1,96 @@ +import fs from "node:fs"; +import path from "node:path"; +import {expect} from "chai"; +import bls from "@chainsafe/bls"; +import {CoordType} from "@chainsafe/blst"; +import {fromHexString} from "@chainsafe/ssz"; +import {itBench} from "@dapplion/benchmark"; +import {Epoch, RootHex, ssz} from "@lodestar/types"; +import {config as defaultChainConfig} from "@lodestar/config/default"; +import {createBeaconConfig} from 
"@lodestar/config"; +import {loadState} from "../../../src/util/loadState.js"; +import {createCachedBeaconState} from "../../../src/cache/stateCache.js"; +import {Index2PubkeyCache, PubkeyIndexMap} from "../../../src/cache/pubkeyCache.js"; +import {EpochShuffling, getShufflingDecisionBlock} from "../../../src/util/epochShuffling.js"; + +describe("loadState", function () { + this.timeout(0); + const stateType = ssz.capella.BeaconState; + + const folder = "/Users/tuyennguyen/tuyen/state_migration"; + const data = Uint8Array.from(fs.readFileSync(path.join(folder, "mainnet_state_7335296.ssz"))); + + const seedState = stateType.deserializeToViewDU(data); + // cache all HashObjects + seedState.hashTreeRoot(); + const config = createBeaconConfig(defaultChainConfig, seedState.genesisValidatorsRoot); + // TODO: EIP-6110 - need to create 2 separate caches? + const pubkey2index = new PubkeyIndexMap(); + const index2pubkey: Index2PubkeyCache = []; + const cachedSeedState = createCachedBeaconState(seedState, { + config, + pubkey2index, + index2pubkey, + }); + + // TODO: precompute shufflings of state 7335360 to avoid the cost of computing shuffling + // as in reality we will have all shufflings + const shufflingGetter = (epoch: Epoch, deicisionBlock: RootHex): EpochShuffling | null => { + const shufflingCache = new Map>(); + const currentEpoch = cachedSeedState.epochCtx.currentShuffling.epoch; + const previousEpoch = currentEpoch - 1; + const nextEpoch = currentEpoch + 1; + const currentEpochDecisionBlock = getShufflingDecisionBlock(seedState, currentEpoch); + const previousEpochDecisionBlock = getShufflingDecisionBlock(seedState, previousEpoch); + const nextEpochDecisionBlock = getShufflingDecisionBlock(seedState, nextEpoch); + shufflingCache.set(currentEpoch, new Map([[currentEpochDecisionBlock, cachedSeedState.epochCtx.currentShuffling]])); + shufflingCache.set( + previousEpoch, + new Map([[previousEpochDecisionBlock, cachedSeedState.epochCtx.previousShuffling]]) + ); + 
shufflingCache.set(nextEpoch, new Map([[nextEpochDecisionBlock, cachedSeedState.epochCtx.nextShuffling]])); + return shufflingCache.get(epoch)?.get(deicisionBlock) ?? null; + }; + + const newStateBytes = Uint8Array.from(fs.readFileSync(path.join(folder, "mainnet_state_7335360.ssz"))); + // const stateRoot6543072 = fromHexString("0xcf0e3c93b080d1c870b9052031f77e08aecbbbba5e4e7b1898b108d76c981a31"); + // const stateRoot7335296 = fromHexString("0xc63b580b63b78c83693ff2b8897cf0e4fcbc46b8a2eab60a090b78ced36afd93"); + const stateRoot7335360 = fromHexString("0xaeb2f977a1502967e09394e81b8bcfdd5a077af82b99deea0dcd3698568efbeb"); + const newStateRoot = stateRoot7335360; + // IMPORTANT: should not load a new separate tree (enable the code below) or the number is not correct (too bad) + // const newState = stateType.deserializeToViewDU(newStateBytes); + // startTime = Date.now(); + // const newStateRoot = newState.hashTreeRoot(); + // console.log("state root of state", toHexString(newStateRoot)); + // console.log("@@@ hashTreeRoot of new state in", Date.now() - startTime, "ms"); + + /** + * My Mac M1 Pro 17:30 Sep 16 2023 + * ✔ migrate state from slot 7335296 64 slots difference 0.4225908 ops/s 2.366355 s/op - 14 runs 35.9 s + * ✔ migrate state from slot 7327776 1 day difference 0.3415936 ops/s 2.927455 s/op - 17 runs 52.6 s + * Memory diff: + * - 64 slots: 104.01 MB + * - 1 day: 113.49 MB + */ + itBench(`migrate state from slot ${seedState.slot} 64 slots difference`, () => { + const {state: migratedState, modifiedValidators} = loadState(config, seedState, newStateBytes); + expect(ssz.Root.equals(migratedState.hashTreeRoot(), newStateRoot)).to.be.true; + // Get the validators sub tree once for all the loop + const validators = migratedState.validators; + for (const validatorIndex of modifiedValidators) { + const validator = validators.getReadonly(validatorIndex); + const pubkey = validator.pubkey; + pubkey2index.set(pubkey, validatorIndex); + index2pubkey[validatorIndex] = 
bls.PublicKey.fromBytes(pubkey, CoordType.jacobian); + } + createCachedBeaconState( + migratedState, + { + config, + pubkey2index, + index2pubkey, + }, + {skipSyncPubkeys: true, shufflingGetter} + ); + }); +}); diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts index 0367fd636e7..1f9a70d61bf 100644 --- a/packages/state-transition/test/unit/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts @@ -1,7 +1,13 @@ import {expect} from "chai"; import {ssz} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; +import {config} from "@lodestar/config/default"; +import {createBeaconConfig} from "@lodestar/config"; import {createCachedBeaconStateTest} from "../utils/state.js"; +import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; +import {createCachedBeaconState, loadCachedBeaconState} from "../../src/cache/stateCache.js"; +import {interopPubkeysCached} from "../utils/interop.js"; +import {modifyStateSameValidator, newStateWithValidators} from "../utils/capella.js"; describe("CachedBeaconState", () => { it("Clone and mutate", () => { @@ -54,4 +60,96 @@ describe("CachedBeaconState", () => { ".serialize() does not automatically commit" ); }); + + describe("loadCachedBeaconState", () => { + const numValidator = 16; + const pubkeys = interopPubkeysCached(2 * numValidator); + + const stateView = newStateWithValidators(numValidator); + const seedState = createCachedBeaconState( + stateView, + { + config: createBeaconConfig(config, stateView.genesisValidatorsRoot), + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }, + {skipSyncCommitteeCache: true} + ); + + const capellaStateType = ssz.capella.BeaconState; + + for (let validatorCountDelta = -numValidator; validatorCountDelta <= numValidator; validatorCountDelta++) { + const testName = `loadCachedBeaconState - ${validatorCountDelta > 0 ? 
"more" : "less"} ${Math.abs( + validatorCountDelta + )} validators`; + it(testName, () => { + const state = modifyStateSameValidator(stateView); + for (let i = 0; i < state.validators.length; i++) { + // only modify some validators + if (i % 5 === 0) { + state.inactivityScores.set(i, state.inactivityScores.get(i) + 1); + state.validators.get(i).effectiveBalance += 1; + } + } + + if (validatorCountDelta < 0) { + state.validators = state.validators.sliceTo(state.validators.length - 1 + validatorCountDelta); + + // inactivityScores + if (state.inactivityScores.length - 1 + validatorCountDelta >= 0) { + state.inactivityScores = state.inactivityScores.sliceTo( + state.inactivityScores.length - 1 + validatorCountDelta + ); + } else { + state.inactivityScores = capellaStateType.fields.inactivityScores.defaultViewDU(); + } + + // previousEpochParticipation + if (state.previousEpochParticipation.length - 1 + validatorCountDelta >= 0) { + state.previousEpochParticipation = state.previousEpochParticipation.sliceTo( + state.previousEpochParticipation.length - 1 + validatorCountDelta + ); + } else { + state.previousEpochParticipation = capellaStateType.fields.previousEpochParticipation.defaultViewDU(); + } + + // currentEpochParticipation + if (state.currentEpochParticipation.length - 1 + validatorCountDelta >= 0) { + state.currentEpochParticipation = state.currentEpochParticipation.sliceTo( + state.currentEpochParticipation.length - 1 + validatorCountDelta + ); + } else { + state.currentEpochParticipation = capellaStateType.fields.currentEpochParticipation.defaultViewDU(); + } + } else { + // more validators + for (let i = 0; i < validatorCountDelta; i++) { + const validator = ssz.phase0.Validator.defaultViewDU(); + validator.pubkey = pubkeys[numValidator + i]; + state.validators.push(validator); + state.inactivityScores.push(1); + state.previousEpochParticipation.push(0b11111111); + state.currentEpochParticipation.push(0b11111111); + } + } + state.commit(); + + // confirm 
loadState() result + const stateBytes = state.serialize(); + const newCachedState = loadCachedBeaconState(seedState, stateBytes, {skipSyncCommitteeCache: true}); + const newStateBytes = newCachedState.serialize(); + expect(newStateBytes).to.be.deep.equal(stateBytes, "loadState: state bytes are not equal"); + expect(newCachedState.hashTreeRoot()).to.be.deep.equal( + state.hashTreeRoot(), + "loadState: state root is not equal" + ); + + // confirm loadCachedBeaconState() result + for (let i = 0; i < newCachedState.validators.length; i++) { + expect(newCachedState.epochCtx.pubkey2index.get(newCachedState.validators.get(i).pubkey)).to.be.equal(i); + expect(newCachedState.epochCtx.index2pubkey[i].toBytes()).to.be.deep.equal(pubkeys[i]); + } + }); + } + }); }); diff --git a/packages/state-transition/test/utils/capella.ts b/packages/state-transition/test/utils/capella.ts index f0f44ae9471..5789c260f67 100644 --- a/packages/state-transition/test/utils/capella.ts +++ b/packages/state-transition/test/utils/capella.ts @@ -1,9 +1,11 @@ +import crypto from "node:crypto"; import {ssz} from "@lodestar/types"; import {config} from "@lodestar/config/default"; -import {BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX} from "@lodestar/params"; -import {CachedBeaconStateCapella} from "../../src/index.js"; +import {BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {BeaconStateCapella, CachedBeaconStateCapella} from "../../src/index.js"; import {createCachedBeaconStateTest} from "./state.js"; import {mulberry32} from "./rand.js"; +import {interopPubkeysCached} from "./interop.js"; export interface WithdrawalOpts { excessBalance: number; @@ -58,3 +60,59 @@ export function getExpectedWithdrawalsTestData(vc: number, opts: WithdrawalOpts) return createCachedBeaconStateTest(state, config, {skipSyncPubkeys: true}); } + +export function newStateWithValidators(numValidator: number): BeaconStateCapella { + // use real pubkeys to test 
loadCachedBeaconState api + const pubkeys = interopPubkeysCached(numValidator); + const capellaStateType = ssz.capella.BeaconState; + const stateView = capellaStateType.defaultViewDU(); + stateView.slot = config.CAPELLA_FORK_EPOCH * SLOTS_PER_EPOCH + 100; + + for (let i = 0; i < numValidator; i++) { + const validator = ssz.phase0.Validator.defaultViewDU(); + validator.pubkey = pubkeys[i]; + stateView.validators.push(validator); + stateView.balances.push(32); + stateView.inactivityScores.push(0); + stateView.previousEpochParticipation.push(0b11111111); + stateView.currentEpochParticipation.push(0b11111111); + } + stateView.commit(); + return stateView; +} + +/** + * Modify a state without changing number of validators + */ +export function modifyStateSameValidator(seedState: BeaconStateCapella): BeaconStateCapella { + const state = seedState.clone(); + state.slot = seedState.slot + 10; + state.latestBlockHeader = ssz.phase0.BeaconBlockHeader.toViewDU({ + slot: state.slot, + proposerIndex: 0, + parentRoot: state.hashTreeRoot(), + stateRoot: state.hashTreeRoot(), + bodyRoot: ssz.phase0.BeaconBlockBody.hashTreeRoot(ssz.phase0.BeaconBlockBody.defaultValue()), + }); + state.blockRoots.set(0, crypto.randomBytes(32)); + state.stateRoots.set(0, crypto.randomBytes(32)); + state.historicalRoots.push(crypto.randomBytes(32)); + state.eth1Data.depositCount = 1000; + state.eth1DataVotes.push(ssz.phase0.Eth1Data.toViewDU(ssz.phase0.Eth1Data.defaultValue())); + state.eth1DepositIndex = 1000; + state.balances.set(0, 30); + state.randaoMixes.set(0, crypto.randomBytes(32)); + state.slashings.set(0, 1n); + state.previousEpochParticipation.set(0, 0b11111110); + state.currentEpochParticipation.set(0, 0b11111110); + state.justificationBits.set(0, true); + state.previousJustifiedCheckpoint.epoch = 1; + state.currentJustifiedCheckpoint.epoch = 1; + state.finalizedCheckpoint.epoch++; + state.latestExecutionPayloadHeader.blockNumber = 1; + state.nextWithdrawalIndex = 1000; + 
state.nextWithdrawalValidatorIndex = 1000; + state.historicalSummaries.push(ssz.capella.HistoricalSummary.toViewDU(ssz.capella.HistoricalSummary.defaultValue())); + state.commit(); + return state; +} diff --git a/packages/types/package.json b/packages/types/package.json index 45676cc616f..20bef61d2e5 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -67,7 +67,7 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/params": "^1.11.1" }, "keywords": [ diff --git a/packages/utils/src/file.ts b/packages/utils/src/file.ts new file mode 100644 index 00000000000..c4cdf0fb099 --- /dev/null +++ b/packages/utils/src/file.ts @@ -0,0 +1,41 @@ +import fs from "node:fs"; +import {promisify} from "node:util"; + +/** Ensure a directory exists */ +export async function ensureDir(path: string): Promise { + try { + await promisify(fs.stat)(path); + } catch (_) { + // not exists + await promisify(fs.mkdir)(path, {recursive: true}); + } +} + +/** Write data to a file if it does not exist */ +export async function writeIfNotExist(filepath: string, bytes: Uint8Array): Promise { + try { + await promisify(fs.stat)(filepath); + return false; + // file exists, do nothing + } catch (_) { + // not exists + await promisify(fs.writeFile)(filepath, bytes); + return true; + } +} + +/** Remove a file if it exists */ +export async function removeFile(path: string): Promise { + try { + await promisify(fs.unlink)(path); + return true; + } catch (_) { + // may not exists + return false; + } +} + +/** Read all file names in a folder */ +export async function readAllFileNames(folderPath: string): Promise { + return promisify(fs.readdir)(folderPath); +} diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index 8e622b310c3..72c2f82579e 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -4,6 +4,7 @@ export * from "./base64.js"; export * from "./bytes.js"; export * 
from "./err.js"; export * from "./errors.js"; +export * from "./file.js"; export * from "./format.js"; export * from "./logger.js"; export * from "./map.js"; diff --git a/packages/validator/package.json b/packages/validator/package.json index ed1433cf48c..3a6d672fdd7 100644 --- a/packages/validator/package.json +++ b/packages/validator/package.json @@ -49,7 +49,7 @@ ], "dependencies": { "@chainsafe/bls": "7.1.1", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@lodestar/api": "^1.11.1", "@lodestar/config": "^1.11.1", "@lodestar/db": "^1.11.1", diff --git a/yarn.lock b/yarn.lock index 8d6155b15fe..98313ad955e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -644,10 +644,10 @@ "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" "^0.6.1" -"@chainsafe/ssz@^0.13.0": - version "0.13.0" - resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.13.0.tgz#0bd11af6abe023d4cc24067a46889dcabbe573e5" - integrity sha512-73PF5bFXE9juLD1+dkmYV/CMO/5ip0TmyzgYw87vAn8Cn+CbwCOp/HyNNdYCmdl104a2bqcORFJzirCvvc+nNw== +"@chainsafe/ssz@^0.14.0": + version "0.14.0" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.14.0.tgz#fe9e4fd3cf673013bd57f77c3ab0fdc5ebc5d916" + integrity sha512-KTc33pWu7ItXlzMAz5/1osOHsvhx25kpM3j7Ez+PNZLyyhIoNzAhhozvxy+ul0fCDfHbvaCRp3lJQnzsb5Iv0A== dependencies: "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" "^0.6.1"