Skip to content

Commit

Permalink
Print TRIE_LOG_STORAGE usage before and after prune
Browse files Browse the repository at this point in the history
Signed-off-by: Simon Dudley <[email protected]>
  • Loading branch information
siladu committed Oct 10, 2023
1 parent e197c5c commit fba6eeb
Show file tree
Hide file tree
Showing 3 changed files with 147 additions and 78 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,13 @@
import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;

import org.hyperledger.besu.cli.util.VersionProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;

import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;

import org.bouncycastle.util.Arrays;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ColumnFamilyMetaData;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
Expand Down Expand Up @@ -88,7 +85,7 @@ public void run() {
options.setCreateIfMissing(true);

// Open the RocksDB database with multiple column families
List<byte[]> cfNames = null;
List<byte[]> cfNames;
try {
cfNames = RocksDB.listColumnFamilies(options, dbPath);
} catch (RocksDBException e) {
Expand All @@ -99,55 +96,9 @@ public void run() {
for (byte[] cfName : cfNames) {
cfDescriptors.add(new ColumnFamilyDescriptor(cfName));
}
boolean emptyColumnFamily;
try (final RocksDB rocksdb = RocksDB.openReadOnly(dbPath, cfDescriptors, cfHandles)) {
for (int i = 0; i < cfNames.size(); i++) {
emptyColumnFamily = false;
byte[] cfName = cfNames.get(i);
ColumnFamilyHandle cfHandle = cfHandles.get(i);
String size = rocksdb.getProperty(cfHandle, "rocksdb.estimate-live-data-size");
if (!size.isEmpty() && !size.isBlank()) {
long sizeLong = Long.parseLong(size);
if (sizeLong == 0) emptyColumnFamily = true;
if (!emptyColumnFamily) {
out.println(
"****** Column family '"
+ getNameById(cfName)
+ "' size: "
+ formatOutputSize(sizeLong)
+ " ******");
// System.out.println("SST table : "+ rocksdb.getProperty(cfHandle,
// "rocksdb.sstables"));

out.println(
"Number of live snapshots : "
+ rocksdb.getProperty(cfHandle, "rocksdb.num-snapshots"));
out.println(
"Number of keys : " + rocksdb.getProperty(cfHandle, "rocksdb.estimate-num-keys"));

String totolSstFilesSize =
rocksdb.getProperty(cfHandle, "rocksdb.total-sst-files-size");
if (!totolSstFilesSize.isEmpty() && !totolSstFilesSize.isBlank()) {
out.println(
"Total size of SST Files : "
+ formatOutputSize(Long.parseLong(totolSstFilesSize)));
}
String liveSstFilesSize =
rocksdb.getProperty(cfHandle, "rocksdb.live-sst-files-size");
if (!liveSstFilesSize.isEmpty() && !liveSstFilesSize.isBlank()) {
out.println(
"Size of live SST Filess : "
+ formatOutputSize(Long.parseLong(liveSstFilesSize)));
}

ColumnFamilyMetaData columnFamilyMetaData = rocksdb.getColumnFamilyMetaData(cfHandle);
long sizeBytes = columnFamilyMetaData.size();
out.println(
"Column family size (with getColumnFamilyMetaData) : "
+ formatOutputSize(sizeBytes));
out.println("");
}
}
for (ColumnFamilyHandle cfHandle : cfHandles) {
RocksDbUsageHelper.printUsageForColumnFamily(rocksdb, cfHandle, out);
}
} catch (RocksDBException e) {
throw new RuntimeException(e);
Expand All @@ -157,29 +108,5 @@ public void run() {
}
}
}

private static String formatOutputSize(final long size) {
  // Renders a byte count as a whole number of GiB, MiB, KiB or B (truncating division).
  final long kib = 1024;
  final long mib = kib * 1024;
  final long gib = mib * 1024;
  if (size > gib) {
    return (size / gib) + " GiB";
  }
  if (size > mib) {
    return (size / mib) + " MiB";
  }
  if (size > kib) {
    return (size / kib) + " KiB";
  }
  return size + " B";
}

/**
 * Resolves a raw column-family id to the name of the matching Besu key-value segment.
 *
 * @param id raw column family name bytes
 * @return the matching segment's name, or {@code null} when no known segment has this id
 */
public static String getNameById(final byte[] id) {
  final KeyValueSegmentIdentifier[] segments = KeyValueSegmentIdentifier.values();
  for (int i = 0; i < segments.length; i++) {
    final KeyValueSegmentIdentifier candidate = segments[i];
    if (Arrays.areEqual(candidate.getId(), id)) {
      return candidate.getName();
    }
  }
  return null; // no segment matches the supplied id
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/

package org.hyperledger.besu.cli.subcommands.operator;

import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;

import java.io.PrintWriter;

import org.bouncycastle.util.Arrays;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ColumnFamilyMetaData;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

/** Helper for reporting RocksDB column-family disk usage in a human-readable form. */
public class RocksDbUsageHelper {

  /** Utility class; not instantiable. */
  private RocksDbUsageHelper() {}

  /**
   * Prints size and key-count statistics for a single RocksDB column family.
   *
   * <p>Nothing is printed for an empty column family (estimated live data size of zero) or when
   * the size property is unavailable.
   *
   * @param rocksdb the open RocksDB instance to query
   * @param cfHandle handle of the column family to report on
   * @param out destination for the human-readable report
   * @throws RocksDBException if a RocksDB property lookup fails
   */
  static void printUsageForColumnFamily(
      final RocksDB rocksdb, final ColumnFamilyHandle cfHandle, final PrintWriter out)
      throws RocksDBException {
    final String size = rocksdb.getProperty(cfHandle, "rocksdb.estimate-live-data-size");
    if (size.isBlank()) {
      return;
    }
    final long sizeLong = Long.parseLong(size);
    if (sizeLong == 0) {
      // Skip empty column families to keep the report readable.
      return;
    }
    out.println(
        "****** Column family '"
            + getNameById(cfHandle.getName())
            + "' size: "
            + formatOutputSize(sizeLong)
            + " ******");
    out.println(
        "Number of live snapshots : " + rocksdb.getProperty(cfHandle, "rocksdb.num-snapshots"));
    out.println(
        "Number of keys : " + rocksdb.getProperty(cfHandle, "rocksdb.estimate-num-keys"));

    final String totalSstFilesSize =
        rocksdb.getProperty(cfHandle, "rocksdb.total-sst-files-size");
    if (!totalSstFilesSize.isBlank()) {
      out.println(
          "Total size of SST Files : " + formatOutputSize(Long.parseLong(totalSstFilesSize)));
    }
    final String liveSstFilesSize = rocksdb.getProperty(cfHandle, "rocksdb.live-sst-files-size");
    if (!liveSstFilesSize.isBlank()) {
      out.println(
          "Size of live SST Files : " + formatOutputSize(Long.parseLong(liveSstFilesSize)));
    }

    final ColumnFamilyMetaData columnFamilyMetaData = rocksdb.getColumnFamilyMetaData(cfHandle);
    out.println(
        "Column family size (with getColumnFamilyMetaData) : "
            + formatOutputSize(columnFamilyMetaData.size()));
    out.println();
  }

  /**
   * Formats a byte count as a whole number of GiB, MiB, KiB or B (truncating division).
   *
   * @param size size in bytes; expected to be non-negative
   * @return human-readable size string, e.g. {@code "3 MiB"}
   */
  private static String formatOutputSize(final long size) {
    if (size > (1024L * 1024 * 1024)) {
      return (size / (1024L * 1024 * 1024)) + " GiB";
    } else if (size > (1024L * 1024)) {
      return (size / (1024L * 1024)) + " MiB";
    } else if (size > 1024) {
      return (size / 1024) + " KiB";
    } else {
      return size + " B";
    }
  }

  /**
   * Resolves a raw column-family id to the name of the matching Besu key-value segment.
   *
   * @param id raw column family name bytes
   * @return the matching {@link KeyValueSegmentIdentifier} name, or {@code null} when no known
   *     segment has this id (e.g. the RocksDB default column family)
   */
  public static String getNameById(final byte[] id) {
    for (KeyValueSegmentIdentifier segment : KeyValueSegmentIdentifier.values()) {
      if (Arrays.areEqual(segment.getId(), id)) {
        return segment.getName();
      }
    }
    return null; // id not found
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

import static com.google.common.base.Preconditions.checkNotNull;
import static org.hyperledger.besu.cli.DefaultCommandValues.MANDATORY_LONG_FORMAT_HELP;
import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;
import static org.hyperledger.besu.ethereum.bonsai.trielog.AbstractTrieLogManager.LOG_RANGE_LIMIT;

import org.hyperledger.besu.cli.util.VersionProvider;
Expand All @@ -31,6 +32,7 @@
import org.hyperledger.besu.plugin.services.trielogs.TrieLog;

import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
Expand All @@ -40,6 +42,11 @@

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
Expand Down Expand Up @@ -390,9 +397,11 @@ static class PruneTrieLog implements Runnable {

@Override
public void run() {
checkNotNull(parentCommand);

final PrintWriter out = spec.commandLine().getOut();

checkNotNull(parentCommand);
printTrieLogDiskUsage(out);

besuController = parentCommand.createBesuController();
final MutableBlockchain blockchain = besuController.getProtocolContext().getBlockchain();
Expand Down Expand Up @@ -427,7 +436,9 @@ public void run() {
LOG.atInfo().setMessage("Retain {}").addArgument(hash::toHexString).log();
}
});
out.printf("Pruned %d trie logs", prunedCount.get());
out.printf("Pruned %d trie logs\n", prunedCount.get());

printTrieLogDiskUsage(out);
} else {
out.println("Please specify --belowBlockNumber");
}
Expand All @@ -441,6 +452,41 @@ public void run() {
}
}

/**
 * Prints disk-usage statistics for the TRIE_LOG_STORAGE column family of the node's RocksDB
 * database, so usage can be compared before and after pruning.
 *
 * <p>The database is opened read-only; no data is modified.
 *
 * @param out destination for the human-readable report
 */
private void printTrieLogDiskUsage(final PrintWriter out) {

  final String dbPath =
      parentCommand
          .parentCommand
          .parentCommand
          .dataDir()
          .toString()
          .concat("/")
          .concat(DATABASE_PATH);

  RocksDB.loadLibrary();

  final List<ColumnFamilyHandle> cfHandles = new ArrayList<>();
  final List<ColumnFamilyDescriptor> cfDescriptors = new ArrayList<>();
  // The default column family must always be supplied when opening a RocksDB database.
  cfDescriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY));
  cfDescriptors.add(
      new ColumnFamilyDescriptor(KeyValueSegmentIdentifier.TRIE_LOG_STORAGE.getId()));
  try (final RocksDB rocksdb = RocksDB.openReadOnly(dbPath, cfDescriptors, cfHandles)) {
    for (ColumnFamilyHandle cfHandle : cfHandles) {
      RocksDbUsageHelper.printUsageForColumnFamily(rocksdb, cfHandle, out);
    }
  } catch (RocksDBException e) {
    // Propagate with context; the cause carries the RocksDB detail.
    throw new RuntimeException("Unable to print trie log disk usage for " + dbPath, e);
  } finally {
    // Column family handles are native resources and must be closed explicitly.
    for (ColumnFamilyHandle cfHandle : cfHandles) {
      cfHandle.close();
    }
  }
}

private void recordResult(
final boolean success, final AtomicInteger prunedCount, final Hash hash) {
if (success) {
Expand Down

0 comments on commit fba6eeb

Please sign in to comment.