Skip to content

Commit

Permalink
add transactionInfoTable unit test
Browse files Browse the repository at this point in the history
  • Loading branch information
DaveTeng0 committed Apr 29, 2024
1 parent fcc1e42 commit 0ef0eaf
Show file tree
Hide file tree
Showing 10 changed files with 432 additions and 139 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -18,28 +18,19 @@
package org.apache.hadoop.ozone.shell;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.container.replication.ReplicationManager;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.debug.DBScanner;
import org.apache.hadoop.ozone.debug.RDBParser;
import org.apache.hadoop.ozone.om.OMStorage;
import org.apache.hadoop.ozone.repair.RDBRepair;
import org.apache.hadoop.ozone.repair.om.TransactionInfoRepair;
import org.apache.hadoop.ozone.repair.TransactionInfoRepair;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import picocli.CommandLine;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_HEARTBEAT_INTERVAL;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PIPELINE_REPORT_INTERVAL;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_COMMAND_STATUS_REPORT_INTERVAL;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_CONTAINER_REPORT_INTERVAL;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL;
import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_NAME;
import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX;
import static org.assertj.core.api.Assertions.assertThat;
Expand All @@ -56,19 +47,7 @@ public class TestOzoneRepairShell {
@BeforeAll
public static void init() throws Exception {
conf = new OzoneConfiguration();
conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
100, TimeUnit.MILLISECONDS);
conf.setTimeDuration(HDDS_HEARTBEAT_INTERVAL, 1, SECONDS);
conf.setTimeDuration(HDDS_PIPELINE_REPORT_INTERVAL, 1, SECONDS);
conf.setTimeDuration(HDDS_COMMAND_STATUS_REPORT_INTERVAL, 1, SECONDS);
conf.setTimeDuration(HDDS_CONTAINER_REPORT_INTERVAL, 1, SECONDS);
ReplicationManager.ReplicationManagerConfiguration replicationConf =
conf.getObject(
ReplicationManager.ReplicationManagerConfiguration.class);
replicationConf.setInterval(Duration.ofSeconds(1));
conf.setFromObject(replicationConf);
cluster = MiniOzoneCluster.newBuilder(conf)
.build();
cluster = MiniOzoneCluster.newBuilder(conf).withoutDatanodes().build();
cluster.waitForClusterToBeReady();
}

Expand All @@ -93,6 +72,9 @@ public void testUpdateTransactionInfoTable() throws Exception {
cmdDBScanner.execute(argsDBScanner);
String cmdOut = stdout.toString();
assertThat(cmdOut).contains(testTermIndex);

cluster.getOzoneManager().start();

}

}
6 changes: 6 additions & 0 deletions hadoop-ozone/tools/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,12 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<artifactId>hdds-test-utils</artifactId>
<scope>test</scope>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.mockito</groupId>-->
<!-- <artifactId>mockito-junit-jupiter</artifactId>-->
<!-- <version>${mockito.version}</version>-->
<!-- <scope>test</scope>-->
<!-- </dependency>-->
</dependencies>
<build>
<plugins>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ public void execute(ScmClient scmClient) throws IOException {
// Throwing exception to create non-zero exit code in case of failure.
throw new IOException(errorMsg);
} else {
System.out.println("Decommissioned Scm " + nodeId);
System.out.println("****_______yyyyyyyyyy,,, Decommissioned Scm " + nodeId);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,11 @@ public String getDbPath() {
return dbPath;
}

// public void setDbPath(String dbPath) {
// this.dbPath = dbPath;
// }


@Override
public Void call() {
GenericCli.missingSubcommand(spec);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license
 * agreements. See the NOTICE file distributed with this work for additional
 * information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License. You may obtain a
 * copy of the License at
 *
 * <p>http://www.apache.org/licenses/LICENSE-2.0
 *
 * <p>Unless required by applicable law or agreed to in writing, software
 * distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and
 * limitations under the License.
 */
package org.apache.hadoop.ozone.repair;

import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.cli.SubcommandWithParent;
import org.apache.hadoop.hdds.utils.IOUtils;
import org.apache.hadoop.hdds.utils.TransactionInfo;
import org.apache.hadoop.hdds.utils.db.StringCodec;
import org.apache.hadoop.hdds.utils.db.managed.ManagedRocksDB;
import org.apache.hadoop.ozone.debug.RocksDBUtils;
import org.kohsuke.MetaInfServices;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;

import static org.apache.hadoop.ozone.OzoneConsts.TRANSACTION_INFO_KEY;
import static org.apache.hadoop.ozone.om.OmMetadataManagerImpl.TRANSACTION_INFO_TABLE;


/**
 * Tool to update the highest term-index in transactionInfoTable.
 */
@CommandLine.Command(
    name = "transaction",
    description = "CLI to update the highest index in transactionInfoTable.",
    mixinStandardHelpOptions = true,
    versionProvider = HddsVersionProvider.class
)
@MetaInfServices(SubcommandWithParent.class)
public class TransactionInfoRepair
    implements Callable<Void>, SubcommandWithParent {

  protected static final Logger LOG = LoggerFactory.getLogger(TransactionInfoRepair.class);

  @CommandLine.Spec
  private static CommandLine.Model.CommandSpec spec;

  @CommandLine.ParentCommand
  private RDBRepair parent;

  @CommandLine.Option(names = {"--highest-transaction"},
      required = true,
      description = "Highest termIndex of transactionInfoTable. The input format is: {term}#{index}.")
  private String highestTransactionTermIndex;


  /** Test seam: allows unit tests to set the term-index without picocli parsing. */
  protected void setHighestTransactionTermIndex(
      String highestTransactionTermIndex) {
    this.highestTransactionTermIndex = highestTransactionTermIndex;
  }

  /**
   * Overwrites the {@code TRANSACTION_INFO_KEY} entry of the
   * transactionInfoTable column family with the term-index supplied via
   * {@code --highest-transaction}, printing the old and new values.
   *
   * <p>Errors from RocksDB are reported on stderr and logged; the method
   * still returns {@code null} in that case (matching SnapshotRepair's
   * error-handling style).
   *
   * @return always {@code null} (picocli {@link Callable} contract)
   */
  @Override
  public Void call() throws Exception {
    List<ColumnFamilyHandle> cfHandleList = new ArrayList<>();
    List<ColumnFamilyDescriptor> cfDescList = RocksDBUtils.getColumnFamilyDescriptors(
        getParent().getDbPath());

    // try-with-resources closes the DB even on error (the handle was
    // previously leaked); going through getManagedRocksDB keeps the open
    // call mockable for unit tests.
    try (ManagedRocksDB db = getManagedRocksDB(cfDescList, cfHandleList)) {
      ColumnFamilyHandle transactionInfoCfh = RocksDBUtils
          .getColumnFamilyHandle(TRANSACTION_INFO_TABLE, cfHandleList);
      if (transactionInfoCfh == null) {
        System.err.println(TRANSACTION_INFO_TABLE + " is not in a column family in DB for the given path.");
        return null;
      }

      TransactionInfo originalTransactionInfo =
          RocksDBUtils.getValue(db, transactionInfoCfh, TRANSACTION_INFO_KEY, TransactionInfo.getCodec());
      System.out.println("The original highest transaction Info was " + originalTransactionInfo.getTermIndex());

      // Parse the user-supplied "{term}#{index}" string and persist it under
      // the same key, using the same codecs the OM uses to read the table.
      TransactionInfo transactionInfo = TransactionInfo.valueOf(
          highestTransactionTermIndex);
      byte[] transactionInfoBytes = TransactionInfo.getCodec().toPersistedFormat(transactionInfo);
      db.get()
          .put(transactionInfoCfh, StringCodec.get().toPersistedFormat(TRANSACTION_INFO_KEY), transactionInfoBytes);

      // Read back through the DB to confirm the update actually landed.
      System.out.println("The highest transaction info has been updated to: " +
          RocksDBUtils.getValue(db, transactionInfoCfh, TRANSACTION_INFO_KEY,
              TransactionInfo.getCodec()).getTermIndex());
    } catch (RocksDBException exception) {
      System.err.println("Failed to update the RocksDB for the given path: " + getParent().getDbPath());
      System.err.println(
          "Make sure that Ozone entity (OM, SCM or DN) is not running for the given database path and current host.");
      // Log with the throwable so the stack trace is preserved.
      LOG.error("Failed to update the transaction info table.", exception);
    } finally {
      IOUtils.closeQuietly(cfHandleList);
    }

    return null;
  }

  protected RDBRepair getParent() {
    return parent;
  }

  @Override
  public Class<?> getParentType() {
    return RDBRepair.class;
  }

  /** Test seam: opens the RocksDB at the parent's path; overridable in tests. */
  protected ManagedRocksDB getManagedRocksDB(List<ColumnFamilyDescriptor> cfDescList,
      List<ColumnFamilyHandle> cfHandleList) throws RocksDBException {
    return ManagedRocksDB.open(getParent().getDbPath(), cfDescList, cfHandleList);
  }

}
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Model.CommandSpec;

Expand All @@ -56,6 +58,8 @@
@MetaInfServices(SubcommandWithParent.class)
public class SnapshotRepair implements Callable<Void>, SubcommandWithParent {

protected static final Logger LOG = LoggerFactory.getLogger(SnapshotRepair.class);

@CommandLine.Spec
private static CommandSpec spec;

Expand Down Expand Up @@ -152,7 +156,7 @@ public Void call() throws Exception {
System.err.println("Failed to update the RocksDB for the given path: " + parent.getDbPath());
System.err.println(
"Make sure that Ozone entity (OM, SCM or DN) is not running for the give dbPath and current host.");
System.err.println(exception);
LOG.error(exception.toString());
} finally {
IOUtils.closeQuietly(cfHandleList);
}
Expand Down

This file was deleted.

Loading

0 comments on commit 0ef0eaf

Please sign in to comment.