From 64c50d9dfb2247852b9e03fd3e41ce426f872e94 Mon Sep 17 00:00:00 2001
From: Andrew Wang
Date: Wed, 2 Apr 2014 08:33:25 +0000
Subject: [PATCH] HDFS-5591. Checkpointing should use monotonic time when
 calculating period. Contributed by Charles Lamb.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1583926 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt           |  3 +++
 .../hdfs/server/namenode/SecondaryNameNode.java       |  6 ++++--
 .../hdfs/server/namenode/ha/StandbyCheckpointer.java  | 12 ++++++------
 .../hdfs/server/namenode/TestSecondaryWebUi.java      |  3 ++-
 4 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 53b31e5b8b..6740c8bac7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -299,6 +299,9 @@ Release 2.5.0 - UNRELEASED
     HDFS-6173. Move the default processor from Ls to Web in OfflineImageViewer.
     (Akira Ajisaka via wheat9)
 
+    HDFS-5591. Checkpointing should use monotonic time when calculating period.
+    (Charles Lamb via wang)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index 573939f1fb..d8df2e9435 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -129,7 +129,9 @@ public class SecondaryNameNode implements Runnable {
     return getClass().getSimpleName() + " Status"
       + "\nName Node Address    : " + nameNodeAddr
       + "\nStart Time           : " + new Date(starttime)
-      + "\nLast Checkpoint Time : " + (lastCheckpointTime == 0? "--": new Date(lastCheckpointTime))
+      + "\nLast Checkpoint      : " + (lastCheckpointTime == 0? "--":
+          ((Time.monotonicNow() - lastCheckpointTime) / 1000))
+      + " seconds ago"
       + "\nCheckpoint Period    : " + checkpointConf.getPeriod() + " seconds"
       + "\nCheckpoint Size      : " + StringUtils.byteDesc(checkpointConf.getTxnCount())
                                     + " (= " + checkpointConf.getTxnCount() + " bytes)"
@@ -376,7 +378,7 @@ public class SecondaryNameNode implements Runnable {
         if(UserGroupInformation.isSecurityEnabled())
           UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
 
-        long now = Time.now();
+        final long now = Time.monotonicNow();
 
         if (shouldCheckpointBasedOnCount() ||
             now >= lastCheckpointTime + 1000 * checkpointConf.getPeriod()) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java
index d0431cd862..27e5b3c488 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.ha;
 
-import static org.apache.hadoop.util.Time.now;
+import static org.apache.hadoop.util.Time.monotonicNow;
 
 import java.io.IOException;
 import java.net.URI;
@@ -277,14 +277,14 @@ public class StandbyCheckpointer {
    * prevented
    */
   private void preventCheckpointsFor(long delayMs) {
-    preventCheckpointsUntil = now() + delayMs;
+    preventCheckpointsUntil = monotonicNow() + delayMs;
   }
 
   private void doWork() {
     final long checkPeriod = 1000 * checkpointConf.getCheckPeriod();
     // Reset checkpoint time so that we don't always checkpoint
     // on startup.
-    lastCheckpointTime = now();
+    lastCheckpointTime = monotonicNow();
     while (shouldRun) {
       boolean needRollbackCheckpoint = namesystem.isNeedRollbackFsImage();
       if (!needRollbackCheckpoint) {
@@ -302,9 +302,9 @@
           UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
         }
 
-        long now = now();
-        long uncheckpointed = countUncheckpointedTxns();
-        long secsSinceLast = (now - lastCheckpointTime)/1000;
+        final long now = monotonicNow();
+        final long uncheckpointed = countUncheckpointedTxns();
+        final long secsSinceLast = (now - lastCheckpointTime) / 1000;
 
         boolean needCheckpoint = needRollbackCheckpoint;
         if (needCheckpoint) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
index 8a5b8c584f..aba96e1c21 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
@@ -62,7 +62,8 @@ public class TestSecondaryWebUi {
   public void testSecondaryWebUi() throws IOException {
     String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
         SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
-    assertTrue(pageContents.contains("Last Checkpoint Time"));
+    assertTrue("Didn't find \"Last Checkpoint\"",
+        pageContents.contains("Last Checkpoint"));
   }
 
   @Test