HDFS-12673. Ozone: Log elapsed time for block deletion tasks. Contributed by Yiqun Lin.
commit 0f0417a12c
parent b1e2704a3a
@@ -201,7 +201,7 @@ public BackgroundTaskResult call() throws Exception {
             File chunkFile = dataDir.toPath()
                 .resolve(chunkInfo.getChunkName()).toFile();
             if (FileUtils.deleteQuietly(chunkFile)) {
-              LOG.info("block {} chunk {} deleted", blockName,
+              LOG.debug("block {} chunk {} deleted", blockName,
                   chunkFile.getAbsolutePath());
             }
           }
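This hunk only lowers the log level: the per-chunk deletion message in the datanode-side block deleting task moves from INFO to DEBUG so that large deletion batches no longer flood the log. The {} placeholders follow the SLF4J parameterized style, which skips message formatting entirely when DEBUG is disabled. A minimal, self-contained sketch of that pattern, with hypothetical class and parameter names standing in for the actual Ozone code:

import java.io.File;
import java.nio.file.Path;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Hypothetical stand-in for the chunk cleanup shown in the hunk above. */
public class ChunkCleanupSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(ChunkCleanupSketch.class);

  /** Deletes one chunk file and records the outcome at DEBUG level. */
  public static void deleteChunk(Path dataDir, String blockName,
      String chunkName) {
    File chunkFile = dataDir.resolve(chunkName).toFile();
    if (FileUtils.deleteQuietly(chunkFile)) {
      // Parameterized logging: the message is only assembled when DEBUG
      // is actually enabled, so the hot deletion path stays cheap.
      LOG.debug("block {} chunk {} deleted", blockName,
          chunkFile.getAbsolutePath());
    }
  }
}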
@@ -21,6 +21,7 @@
 import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
 import org.apache.hadoop.ozone.common.BlockGroup;
 import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.utils.BackgroundService;
 import org.apache.hadoop.utils.BackgroundTask;
 import org.apache.hadoop.utils.BackgroundTaskQueue;
@@ -91,6 +92,7 @@ public int getPriority() {
     @Override
     public BackgroundTaskResult call() throws Exception {
       try {
+        long startTime = Time.monotonicNow();
         List<BlockGroup> keyBlocksList = manager
             .getPendingDeletionKeys(keyLimitPerTask);
         if (keyBlocksList.size() > 0) {
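The startTime added here uses org.apache.hadoop.util.Time.monotonicNow(), which returns milliseconds from a monotonic clock; unlike System.currentTimeMillis() it cannot jump when the wall clock is adjusted, so the subtraction performed later in the task yields a trustworthy duration. A minimal sketch of the measurement pattern, assuming a hypothetical timedSection() helper in place of the real key-deletion work:

import org.apache.hadoop.util.Time;

/** Hypothetical illustration of the elapsed-time pattern used by this patch. */
public class MonotonicTimingSketch {

  /** Runs the given work and returns how long it took in milliseconds. */
  public static long timedSection(Runnable work) {
    long startTime = Time.monotonicNow(); // monotonic, not wall-clock
    work.run();
    return Time.monotonicNow() - startTime;
  }

  public static void main(String[] args) {
    long elapsedMs = timedSection(() -> {
      // stands in for fetching pending-deletion keys and purging them
    });
    System.out.println("task elapsed time: " + elapsedMs + "ms");
  }
}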
@@ -102,7 +104,7 @@ public BackgroundTaskResult call() throws Exception {
             try {
               // Purge key from KSM DB.
               manager.deletePendingDeletionKey(result.getObjectKey());
-              LOG.info("Key {} deleted from KSM DB", result.getObjectKey());
+              LOG.debug("Key {} deleted from KSM DB", result.getObjectKey());
             } catch (IOException e) {
               // if a pending deletion key is failed to delete,
               // print a warning here and retain it in this state,
@@ -118,6 +120,13 @@ public BackgroundTaskResult call() throws Exception {
                   String.join(",", result.getFailedBlocks()));
             }
           }
+
+          if (!results.isEmpty()) {
+            LOG.info("Number of key deleted from KSM DB: {},"
+                + " task elapsed time: {}ms",
+                results.size(), Time.monotonicNow() - startTime);
+          }
+
           return results::size;
         } else {
           LOG.debug("No pending deletion key found in KSM");
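Together with the startTime captured at the top of call(), this guard replaces a per-key INFO line with a single per-run summary: a run that purges keys logs one line such as (values purely illustrative) "Number of key deleted from KSM DB: 50, task elapsed time: 12ms", while an idle run emits nothing above DEBUG.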
@@ -24,6 +24,7 @@
 import org.apache.hadoop.ozone.scm.container.Mapping;
 import org.apache.hadoop.ozone.scm.node.NodeManager;
 import org.apache.hadoop.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.utils.BackgroundService;
 import org.apache.hadoop.utils.BackgroundTask;
 import org.apache.hadoop.utils.BackgroundTaskQueue;
@@ -98,28 +99,39 @@ public int getPriority() {
 
     @Override
     public EmptyTaskResult call() throws Exception {
+      int dnTxCount = 0;
+      long startTime = Time.monotonicNow();
       // Scan SCM DB in HB interval and collect a throttled list of
       // to delete blocks.
       LOG.debug("Running DeletedBlockTransactionScanner");
       DatanodeDeletedBlockTransactions transactions =
           getToDeleteContainerBlocks();
       if (transactions != null && !transactions.isEmpty()) {
-        transactions.getDatanodes().forEach(datanodeID -> {
-          List<DeletedBlocksTransaction> dnTXs =
-              transactions.getDatanodeTransactions(datanodeID);
+        for (DatanodeID datanodeID : transactions.getDatanodes()) {
+          List<DeletedBlocksTransaction> dnTXs = transactions
+              .getDatanodeTransactions(datanodeID);
+          dnTxCount += dnTXs.size();
           // TODO commandQueue needs a cap.
           // We should stop caching new commands if num of un-processed
           // command is bigger than a limit, e.g 50. In case datanode goes
           // offline for sometime, the cached commands be flooded.
           nodeManager.addDatanodeCommand(datanodeID,
               new DeleteBlocksCommand(dnTXs));
-          LOG.info("Added delete block command for datanode {} in the queue,"
+          LOG.debug(
+              "Added delete block command for datanode {} in the queue,"
               + " number of delete block transactions: {}, TxID list: {}",
              datanodeID, dnTXs.size(),
              String.join(",", transactions.getTransactionIDList(datanodeID)));
-        });
+        }
       }
 
+      if (dnTxCount > 0) {
+        LOG.info("Totally added {} delete blocks command for"
+            + " {} datanodes, task elapsed time: {}ms",
+            dnTxCount, transactions.getDatanodes().size(),
+            Time.monotonicNow() - startTime);
+      }
+
       return EmptyTaskResult.newResult();
     }
 
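Besides the DEBUG downgrade and the new summary, this hunk swaps the forEach lambda for a plain for loop. The reason is the new counter: dnTxCount is a local variable that the loop body mutates, and Java lambdas may only capture effectively final locals, so dnTxCount += dnTXs.size() would not compile inside the closure. A short, self-contained illustration of that constraint, using placeholder types rather than the real SCM classes:

import java.util.Arrays;
import java.util.List;

/** Hypothetical illustration of why the patch replaces forEach with a loop. */
public class LambdaCaptureSketch {
  public static void main(String[] args) {
    // Each inner list stands in for one datanode's pending transactions.
    List<List<Integer>> perDatanodeTxs =
        Arrays.asList(Arrays.asList(1, 2), Arrays.asList(3));

    int dnTxCount = 0;

    // Would NOT compile: dnTxCount is reassigned, so it is not effectively
    // final and cannot be captured by the lambda body.
    // perDatanodeTxs.forEach(txs -> dnTxCount += txs.size());

    // The plain loop used by the patch can update the local counter freely,
    // which is what enables the single aggregated INFO line at the end.
    for (List<Integer> txs : perDatanodeTxs) {
      dnTxCount += txs.size();
    }

    System.out.println("total delete-block transactions queued: " + dnTxCount);
  }
}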