chore: v1.23.1 release #7262

Merged 6 commits on Nov 29, 2024 (diff below shows changes from 5 commits).

2 changes: 1 addition & 1 deletion lerna.json
@@ -4,7 +4,7 @@
],
"npmClient": "yarn",
"useNx": true,
"version": "1.23.0",
"version": "1.23.1",
"stream": true,
"command": {
"version": {
10 changes: 5 additions & 5 deletions packages/api/package.json
@@ -11,7 +11,7 @@
"bugs": {
"url": "https://github.com/ChainSafe/lodestar/issues"
},
"version": "1.23.0",
"version": "1.23.1",
"type": "module",
"exports": {
".": {
@@ -72,10 +72,10 @@
"dependencies": {
"@chainsafe/persistent-merkle-tree": "^0.8.0",
"@chainsafe/ssz": "^0.18.0",
"@lodestar/config": "^1.23.0",
"@lodestar/params": "^1.23.0",
"@lodestar/types": "^1.23.0",
"@lodestar/utils": "^1.23.0",
"@lodestar/config": "^1.23.1",
"@lodestar/params": "^1.23.1",
"@lodestar/types": "^1.23.1",
"@lodestar/utils": "^1.23.1",
"eventsource": "^2.0.2",
"qs": "^6.11.1"
},
26 changes: 13 additions & 13 deletions packages/beacon-node/package.json
@@ -11,7 +11,7 @@
"bugs": {
"url": "https://github.com/ChainSafe/lodestar/issues"
},
"version": "1.23.0",
"version": "1.23.1",
"type": "module",
"exports": {
".": {
@@ -120,18 +120,18 @@
"@libp2p/peer-id-factory": "^4.1.0",
"@libp2p/prometheus-metrics": "^3.0.21",
"@libp2p/tcp": "9.0.23",
"@lodestar/api": "^1.23.0",
"@lodestar/config": "^1.23.0",
"@lodestar/db": "^1.23.0",
"@lodestar/fork-choice": "^1.23.0",
"@lodestar/light-client": "^1.23.0",
"@lodestar/logger": "^1.23.0",
"@lodestar/params": "^1.23.0",
"@lodestar/reqresp": "^1.23.0",
"@lodestar/state-transition": "^1.23.0",
"@lodestar/types": "^1.23.0",
"@lodestar/utils": "^1.23.0",
"@lodestar/validator": "^1.23.0",
"@lodestar/api": "^1.23.1",
"@lodestar/config": "^1.23.1",
"@lodestar/db": "^1.23.1",
"@lodestar/fork-choice": "^1.23.1",
"@lodestar/light-client": "^1.23.1",
"@lodestar/logger": "^1.23.1",
"@lodestar/params": "^1.23.1",
"@lodestar/reqresp": "^1.23.1",
"@lodestar/state-transition": "^1.23.1",
"@lodestar/types": "^1.23.1",
"@lodestar/utils": "^1.23.1",
"@lodestar/validator": "^1.23.1",
"@multiformats/multiaddr": "^12.1.3",
"c-kzg": "^2.1.2",
"datastore-core": "^9.1.1",
44 changes: 33 additions & 11 deletions packages/beacon-node/src/chain/archiver/archiveBlocks.ts
@@ -59,8 +59,8 @@ export async function archiveBlocks(
});

if (finalizedPostDeneb) {
await migrateBlobSidecarsFromHotToColdDb(config, db, finalizedCanonicalBlockRoots);
logger.verbose("Migrated blobSidecars from hot DB to cold DB");
const migrate = await migrateBlobSidecarsFromHotToColdDb(config, db, finalizedCanonicalBlockRoots, currentEpoch);
logger.verbose(migrate ? "Migrated blobSidecars from hot DB to cold DB" : "Skip blobSidecars migration");
}
}

@@ -157,22 +157,36 @@ async function migrateBlocksFromHotToColdDb(db: IBeaconDb, blocks: BlockRootSlot
}
}

/**
* Migrate blobSidecars from the hot DB to the cold DB.
* @returns true if any blobSidecars were migrated, false if every block is pre-Deneb or outside the blob retention window.
*/
async function migrateBlobSidecarsFromHotToColdDb(
config: ChainForkConfig,
db: IBeaconDb,
blocks: BlockRootSlot[]
): Promise<void> {
blocks: BlockRootSlot[],
currentEpoch: Epoch
): Promise<boolean> {
let result = false;

for (let i = 0; i < blocks.length; i += BLOB_SIDECAR_BATCH_SIZE) {
const toIdx = Math.min(i + BLOB_SIDECAR_BATCH_SIZE, blocks.length);
const canonicalBlocks = blocks.slice(i, toIdx);

// processCanonicalBlocks
if (canonicalBlocks.length === 0) return;
if (canonicalBlocks.length === 0) return false;

// load Buffer instead of ssz deserialized to improve performance
const canonicalBlobSidecarsEntries: KeyValue<Slot, Uint8Array>[] = await Promise.all(
canonicalBlocks
.filter((block) => config.getForkSeq(block.slot) >= ForkSeq.deneb)
.filter((block) => {
const blockSlot = block.slot;
const blockEpoch = computeEpochAtSlot(blockSlot);
return (
config.getForkSeq(blockSlot) >= ForkSeq.deneb &&
blockEpoch >= currentEpoch - config.MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS
);
})
.map(async (block) => {
const bytes = await db.blobSidecars.getBinary(block.root);
if (!bytes) {
@@ -182,12 +196,20 @@ async function migrateBlobSidecarsFromHotToColdDb(
})
);

// put to blockArchive db and delete block db
await Promise.all([
db.blobSidecarsArchive.batchPutBinary(canonicalBlobSidecarsEntries),
db.blobSidecars.batchDelete(canonicalBlocks.map((block) => block.root)),
]);
const migrate = canonicalBlobSidecarsEntries.length > 0;

if (migrate) {
// put to blobSidecarsArchive db and delete from blobSidecars db
await Promise.all([
db.blobSidecarsArchive.batchPutBinary(canonicalBlobSidecarsEntries),
db.blobSidecars.batchDelete(canonicalBlocks.map((block) => block.root)),
]);
}

result = result || migrate;
}

return result;
}

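For readers skimming the diff: `archiveBlocks` now passes `currentEpoch` down so that blobSidecars are migrated to the cold DB only when the finalized block is post-Deneb and still inside the blob retention window; older blobs are skipped and left for pruning, and the new boolean return value plus the adjusted log message report whether anything was actually migrated. A minimal sketch of that predicate, with `SLOTS_PER_EPOCH`, the Deneb epoch, and the retention constant treated as assumptions rather than imported from `@lodestar/params` / `@lodestar/config`:

```ts
// Sketch only: the constants and config shape below are assumptions for illustration,
// not the actual @lodestar/params / @lodestar/config APIs.
const SLOTS_PER_EPOCH = 32;

const computeEpochAtSlot = (slot: number): number => Math.floor(slot / SLOTS_PER_EPOCH);

interface BlobRetentionConfig {
  /** epoch at which Deneb activates (network-specific) */
  denebForkEpoch: number;
  /** retention window for blob sidecars, e.g. 4096 epochs on mainnet */
  MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: number;
}

/** Mirrors the filter above: migrate blobSidecars only for post-Deneb blocks inside the retention window. */
function shouldMigrateBlobSidecars(blockSlot: number, currentEpoch: number, config: BlobRetentionConfig): boolean {
  const blockEpoch = computeEpochAtSlot(blockSlot);
  return (
    blockEpoch >= config.denebForkEpoch &&
    blockEpoch >= currentEpoch - config.MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS
  );
}
```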
1 change: 0 additions & 1 deletion packages/beacon-node/src/chain/chain.ts
@@ -293,7 +293,6 @@ export class BeaconChain implements IBeaconChain {
metrics,
logger,
clock,
shufflingCache: this.shufflingCache,
blockStateCache,
bufferPool: this.bufferPool,
datastore: fileDataStore
@@ -6,7 +6,6 @@ import {loadCachedBeaconState} from "@lodestar/state-transition";
import {INTERVALS_PER_SLOT} from "@lodestar/params";
import {Metrics} from "../../metrics/index.js";
import {IClock} from "../../util/clock.js";
import {ShufflingCache} from "../shufflingCache.js";
import {AllocSource, BufferPool, BufferWithKey} from "../../util/bufferPool.js";
import {StateCloneOpts} from "../regen/interface.js";
import {serializeState} from "../serializeState.js";
@@ -17,16 +16,13 @@ import {CheckpointHex, CacheItemType, CheckpointStateCache, BlockStateCache} fro
export type PersistentCheckpointStateCacheOpts = {
/** Keep max n states in memory, persist the rest to disk */
maxCPStateEpochsInMemory?: number;
/** for testing only */
processLateBlock?: boolean;
};

type PersistentCheckpointStateCacheModules = {
metrics?: Metrics | null;
logger: Logger;
clock?: IClock | null;
signal?: AbortSignal;
shufflingCache: ShufflingCache;
datastore: CPStateDatastore;
blockStateCache: BlockStateCache;
bufferPool?: BufferPool | null;
@@ -102,24 +98,12 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache {
private preComputedCheckpoint: string | null = null;
private preComputedCheckpointHits: number | null = null;
private readonly maxEpochsInMemory: number;
// only for testing, default false for production
private readonly processLateBlock: boolean;
private readonly datastore: CPStateDatastore;
private readonly shufflingCache: ShufflingCache;
private readonly blockStateCache: BlockStateCache;
private readonly bufferPool?: BufferPool | null;

constructor(
{
metrics,
logger,
clock,
signal,
shufflingCache,
datastore,
blockStateCache,
bufferPool,
}: PersistentCheckpointStateCacheModules,
{metrics, logger, clock, signal, datastore, blockStateCache, bufferPool}: PersistentCheckpointStateCacheModules,
opts: PersistentCheckpointStateCacheOpts
) {
this.cache = new MapTracker(metrics?.cpStateCache);
@@ -153,10 +137,8 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache {
throw new Error("maxEpochsInMemory must be >= 0");
}
this.maxEpochsInMemory = opts.maxCPStateEpochsInMemory ?? DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY;
this.processLateBlock = opts.processLateBlock ?? false;
// Specify different datastore for testing
this.datastore = datastore;
this.shufflingCache = shufflingCache;
this.blockStateCache = blockStateCache;
this.bufferPool = bufferPool;
}
@@ -169,12 +151,11 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache {
await this.datastore.init();
}
const persistedKeys = await this.datastore.readKeys();
for (const persistedKey of persistedKeys) {
const cp = datastoreKeyToCheckpoint(persistedKey);
this.cache.set(toCacheKey(cp), {type: CacheItemType.persisted, value: persistedKey});
this.epochIndex.getOrDefault(cp.epoch).add(toRootHex(cp.root));
}
this.logger.info("Loaded persisted checkpoint states from the last run", {
// checkpoint states persisted by the last run are not trusted, remove them all
// otherwise a bad checkpoint state from the last run could get the node stuck
// this was found during the mekong devnet, see https://github.com/ChainSafe/lodestar/pull/7255
await Promise.all(persistedKeys.map((key) => this.datastore.remove(key)));
this.logger.info("Removed persisted checkpoint states from the last run", {
count: persistedKeys.length,
maxEpochsInMemory: this.maxEpochsInMemory,
});
@@ -487,12 +468,9 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache {
// 2/3 of slot is the most free time of every slot, take that chance to persist checkpoint states
// normally it should only persist checkpoint states at 2/3 of slot 0 of epoch
await sleep(secToTwoThirdsSlot * 1000, this.signal);
} else if (!this.processLateBlock) {
// normally the block persist happens at 2/3 of slot 0 of epoch, if it's already late then just skip to allow other tasks to run
// there are plenty of chances in the same epoch to persist checkpoint states, also if block is late it could be reorged
this.logger.verbose("Skip persist checkpoint states", {blockSlot, root: blockRootHex});
return 0;
}
// while syncing, it's critical to persist checkpoint states as soon as possible to avoid OOM during periods of non-finality
// if the node is synced this is not a time-critical moment since the block came late: we've likely missed the attestation already, or the block may be orphaned

const persistEpochs = sortedEpochs.slice(0, sortedEpochs.length - this.maxEpochsInMemory);
for (const lowestEpoch of persistEpochs) {
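Two things change in the persistent checkpoint state cache. The removal of `shufflingCache` and the test-only `processLateBlock` option is mechanical: the cache no longer reads either, so the constructor and its callers (chain.ts above, the unit tests below) simply drop them. The behavioral change is in `init()`: persisted checkpoint states left over from a previous run are now deleted instead of being re-indexed, because a corrupt persisted state could wedge the node on restart (observed on the mekong devnet, see PR #7255). A simplified sketch of the new startup path, with the datastore and logger shapes assumed rather than taken from the real `CPStateDatastore` / logger types:

```ts
// Sketch only: the datastore/logger interfaces below are assumptions for illustration.
interface CheckpointDatastore {
  init(): Promise<void>;
  readKeys(): Promise<Uint8Array[]>;
  remove(key: Uint8Array): Promise<void>;
}

type Logger = {info(msg: string, meta?: Record<string, unknown>): void};

async function initCheckpointStateCache(datastore: CheckpointDatastore, logger: Logger): Promise<void> {
  await datastore.init();
  const persistedKeys = await datastore.readKeys();
  // states persisted by a previous run are not trusted; drop them instead of re-indexing them
  await Promise.all(persistedKeys.map((key) => datastore.remove(key)));
  logger.info("Removed persisted checkpoint states from the last run", {count: persistedKeys.length});
}
```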
@@ -36,8 +36,9 @@ export async function beaconBlocksMaybeBlobsByRange(
return blocks.map((block) => getBlockInput.preData(config, block.data, BlockSource.byRange, block.bytes));
}

// From Deneb
// Only request blobs if they are recent enough
if (computeEpochAtSlot(startSlot) >= currentEpoch - config.MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS) {
if (startEpoch >= currentEpoch - config.MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS) {
const [allBlocks, allBlobSidecars] = await Promise.all([
network.sendBeaconBlocksByRange(peerId, request),
network.sendBlobSidecarsByRange(peerId, request),
Expand All @@ -46,8 +47,9 @@ export async function beaconBlocksMaybeBlobsByRange(
return matchBlockWithBlobs(config, allBlocks, allBlobSidecars, endSlot, BlockSource.byRange, BlobsSource.byRange);
}

// Post Deneb but old blobs
throw Error("Cannot sync blobs outside of blobs prune window");
// Data is out of range, only request blocks
const blocks = await network.sendBeaconBlocksByRange(peerId, request);
return blocks.map((block) => getBlockInput.outOfRangeData(config, block.data, BlockSource.byRange, block.bytes));
}

// Assumes that the blobs are in the same sequence as blocks, doesn't require block to be sorted
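This changes by-range sync behavior for ranges older than the blob retention window: rather than throwing ("Cannot sync blobs outside of blobs prune window"), the peer is asked for blocks only and the results are wrapped via `getBlockInput.outOfRangeData`, which is how the rest of the sync pipeline learns that blobs were intentionally not fetched. A rough sketch of the three branches (parameter names are illustrative; the real function takes a `ChainForkConfig` and the network request, not bare numbers):

```ts
// Sketch only: a decision helper mirroring the branches in the diff above.
type ByRangePlan = "pre-deneb-blocks-only" | "blocks-and-blobs" | "out-of-range-blocks-only";

function planBlocksMaybeBlobsByRange(
  startEpoch: number,
  currentEpoch: number,
  denebForkEpoch: number,
  minEpochsForBlobSidecarsRequests: number
): ByRangePlan {
  // before Deneb there are no blobs at all
  if (startEpoch < denebForkEpoch) return "pre-deneb-blocks-only";
  // recent enough: request blocks and blob sidecars in parallel and match them up
  if (startEpoch >= currentEpoch - minEpochsForBlobSidecarsRequests) return "blocks-and-blobs";
  // older than the retention window: previously an error, now blocks-only out-of-range data
  return "out-of-range-blocks-only";
}
```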
@@ -90,10 +90,9 @@ describe("PersistentCheckpointStateCache", () => {
{
datastore,
logger: testLogger(),
shufflingCache: new ShufflingCache(),
blockStateCache: new FIFOBlockStateCache({}, {}),
},
{maxCPStateEpochsInMemory: 2, processLateBlock: true}
{maxCPStateEpochsInMemory: 2}
);
cache.add(cp0a, states["cp0a"]);
cache.add(cp0b, states["cp0b"]);
@@ -165,10 +164,9 @@ describe("PersistentCheckpointStateCache", () => {
{
datastore,
logger: testLogger(),
shufflingCache: new ShufflingCache(),
blockStateCache: new FIFOBlockStateCache({}, {}),
},
{maxCPStateEpochsInMemory: 2, processLateBlock: true}
{maxCPStateEpochsInMemory: 2}
);
cache.add(cp0a, states["cp0a"]);
cache.add(cp0b, states["cp0b"]);
@@ -242,10 +240,9 @@ describe("PersistentCheckpointStateCache", () => {
{
datastore,
logger: testLogger(),
shufflingCache: new ShufflingCache(),
blockStateCache: new FIFOBlockStateCache({}, {}),
},
{maxCPStateEpochsInMemory: 2, processLateBlock: true}
{maxCPStateEpochsInMemory: 2}
);
cache.add(cp0a, states["cp0a"]);
cache.add(cp0b, states["cp0b"]);
@@ -548,10 +545,9 @@ describe("PersistentCheckpointStateCache", () => {
{
datastore,
logger: testLogger(),
shufflingCache: new ShufflingCache(),
blockStateCache: new FIFOBlockStateCache({}, {}),
},
{maxCPStateEpochsInMemory: 1, processLateBlock: true}
{maxCPStateEpochsInMemory: 1}
);
cache.add(cp0a, states["cp0a"]);
cache.add(cp0b, states["cp0b"]);
@@ -820,10 +816,9 @@ describe("PersistentCheckpointStateCache", () => {
{
datastore,
logger: testLogger(),
shufflingCache: new ShufflingCache(),
blockStateCache: new FIFOBlockStateCache({}, {}),
},
{maxCPStateEpochsInMemory: 0, processLateBlock: true}
{maxCPStateEpochsInMemory: 0}
);
cache.add(cp0a, states["cp0a"]);
cache.add(cp0b, states["cp0b"]);
@@ -911,10 +906,9 @@ describe("PersistentCheckpointStateCache", () => {
{
datastore,
logger: testLogger(),
shufflingCache: new ShufflingCache(),
blockStateCache: new FIFOBlockStateCache({}, {}),
},
{maxCPStateEpochsInMemory: 0, processLateBlock: true}
{maxCPStateEpochsInMemory: 0}
);

const root1a = Buffer.alloc(32, 100);
26 changes: 13 additions & 13 deletions packages/cli/package.json
@@ -1,6 +1,6 @@
{
"name": "@chainsafe/lodestar",
"version": "1.23.0",
"version": "1.23.1",
"description": "Command line interface for lodestar",
"author": "ChainSafe Systems",
"license": "LGPL-3.0",
@@ -62,17 +62,17 @@
"@libp2p/crypto": "^4.1.0",
"@libp2p/peer-id": "^4.1.0",
"@libp2p/peer-id-factory": "^4.1.0",
"@lodestar/api": "^1.23.0",
"@lodestar/beacon-node": "^1.23.0",
"@lodestar/config": "^1.23.0",
"@lodestar/db": "^1.23.0",
"@lodestar/light-client": "^1.23.0",
"@lodestar/logger": "^1.23.0",
"@lodestar/params": "^1.23.0",
"@lodestar/state-transition": "^1.23.0",
"@lodestar/types": "^1.23.0",
"@lodestar/utils": "^1.23.0",
"@lodestar/validator": "^1.23.0",
"@lodestar/api": "^1.23.1",
"@lodestar/beacon-node": "^1.23.1",
"@lodestar/config": "^1.23.1",
"@lodestar/db": "^1.23.1",
"@lodestar/light-client": "^1.23.1",
"@lodestar/logger": "^1.23.1",
"@lodestar/params": "^1.23.1",
"@lodestar/state-transition": "^1.23.1",
"@lodestar/types": "^1.23.1",
"@lodestar/utils": "^1.23.1",
"@lodestar/validator": "^1.23.1",
"@multiformats/multiaddr": "^12.1.3",
"deepmerge": "^4.3.1",
"ethers": "^6.7.0",
@@ -88,7 +88,7 @@
"yargs": "^17.7.1"
},
"devDependencies": {
"@lodestar/test-utils": "^1.23.0",
"@lodestar/test-utils": "^1.23.1",
"@types/debug": "^4.1.7",
"@types/got": "^9.6.12",
"@types/inquirer": "^9.0.3",