HADOOP-14276. Add a nanosecond API to Time/Timer/FakeTimer. Contributed by Erik Krogen.
commit 95b7f1d29a (parent 0116c3c957)
LightWeightCache.java

@@ -77,14 +77,6 @@ public int compare(Entry left, Entry right) {
     }
   };
 
-  /** A clock for measuring time so that it can be mocked in unit tests. */
-  static class Clock {
-    /** @return the current time. */
-    long currentTime() {
-      return System.nanoTime();
-    }
-  }
-
   private static int updateRecommendedLength(int recommendedLength,
       int sizeLimit) {
     return sizeLimit > 0 && sizeLimit < recommendedLength?
@@ -102,7 +94,7 @@ private static int updateRecommendedLength(int recommendedLength,
   private final long creationExpirationPeriod;
   private final long accessExpirationPeriod;
   private final int sizeLimit;
-  private final Clock clock;
+  private final Timer timer;
 
   /**
    * @param recommendedLength Recommended size of the internal array.
@@ -120,7 +112,7 @@ public LightWeightCache(final int recommendedLength,
       final long creationExpirationPeriod,
       final long accessExpirationPeriod) {
     this(recommendedLength, sizeLimit,
-        creationExpirationPeriod, accessExpirationPeriod, new Clock());
+        creationExpirationPeriod, accessExpirationPeriod, new Timer());
   }
 
   @VisibleForTesting
@@ -128,7 +120,7 @@ public LightWeightCache(final int recommendedLength,
       final int sizeLimit,
       final long creationExpirationPeriod,
       final long accessExpirationPeriod,
-      final Clock clock) {
+      final Timer timer) {
     super(updateRecommendedLength(recommendedLength, sizeLimit));
 
     this.sizeLimit = sizeLimit;
@@ -147,11 +139,11 @@ public LightWeightCache(final int recommendedLength,
 
     this.queue = new PriorityQueue<Entry>(
         sizeLimit > 0? sizeLimit + 1: 1 << 10, expirationTimeComparator);
-    this.clock = clock;
+    this.timer = timer;
   }
 
   void setExpirationTime(final Entry e, final long expirationPeriod) {
-    e.setExpirationTime(clock.currentTime() + expirationPeriod);
+    e.setExpirationTime(timer.monotonicNowNanos() + expirationPeriod);
   }
 
   boolean isExpired(final Entry e, final long now) {
@@ -168,7 +160,7 @@ private E evict() {
 
   /** Evict expired entries. */
   private void evictExpiredEntries() {
-    final long now = clock.currentTime();
+    final long now = timer.monotonicNowNanos();
     for(int i = 0; i < EVICTION_LIMIT; i++) {
       final Entry peeked = queue.peek();
       if (peeked == null || !isExpired(peeked, now)) {
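Because setExpirationTime() adds the period to timer.monotonicNowNanos(), the creation and access expiration periods passed to LightWeightCache are interpreted as nanoseconds (as they already were with the removed Clock, which also returned System.nanoTime()). A minimal sketch of the conversion a millisecond-oriented caller would need; the class and method names below are illustrative and not part of this patch:

import java.util.concurrent.TimeUnit;

// Illustrative helper only (not from this patch): converts a millisecond
// TTL into the nanosecond expiration period that LightWeightCache
// compares against Timer#monotonicNowNanos().
public final class ExpirationPeriods {
  private ExpirationPeriods() {}

  /** @return the given millisecond TTL expressed in nanoseconds. */
  public static long creationExpirationNanos(long ttlMillis) {
    return TimeUnit.MILLISECONDS.toNanos(ttlMillis);
  }
}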
Time.java

@@ -65,6 +65,16 @@ public static long monotonicNow() {
     return System.nanoTime() / NANOSECONDS_PER_MILLISECOND;
   }
 
+  /**
+   * Same as {@link #monotonicNow()} but returns its result in nanoseconds.
+   * Note that this is subject to the same resolution constraints as
+   * {@link System#nanoTime()}.
+   * @return a monotonic clock that counts in nanoseconds.
+   */
+  public static long monotonicNowNanos() {
+    return System.nanoTime();
+  }
+
   /**
    * Convert time in millisecond to human readable format.
    * @return a human readable string for the input time
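The new static method is meant for interval measurement rather than wall-clock time: only the difference between two readings is meaningful. A small usage sketch, assuming illustrative class and variable names that are not part of the patch:

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.util.Time;

// Illustrative sketch: measure an elapsed interval in nanoseconds with the
// new Time.monotonicNowNanos() and report it in microseconds.
public class ElapsedNanosExample {
  public static void main(String[] args) throws InterruptedException {
    final long startNanos = Time.monotonicNowNanos();
    Thread.sleep(5);  // stand-in for the work being timed
    final long elapsedNanos = Time.monotonicNowNanos() - startNanos;
    System.out.println("elapsed = "
        + TimeUnit.NANOSECONDS.toMicros(elapsedNanos) + " us");
  }
}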
Timer.java

@@ -48,4 +48,14 @@ public long now() {
    * @return a monotonic clock that counts in milliseconds.
    */
   public long monotonicNow() { return Time.monotonicNow(); }
+
+  /**
+   * Same as {@link #monotonicNow()} but returns its result in nanoseconds.
+   * Note that this is subject to the same resolution constraints as
+   * {@link System#nanoTime()}.
+   * @return a monotonic clock that counts in nanoseconds.
+   */
+  public long monotonicNowNanos() {
+    return Time.monotonicNowNanos();
+  }
 }
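The instance-level Timer method exists so that a clock can be injected and faked in tests; the LightWeightCache change above follows exactly this pattern. A minimal sketch of the same pattern under assumed names (RateGauge is hypothetical, not part of Hadoop):

import org.apache.hadoop.util.FakeTimer;
import org.apache.hadoop.util.Timer;

// Illustrative sketch of the timer-injection pattern this patch applies:
// production code constructs a real Timer, tests hand in a FakeTimer.
public class RateGauge {
  private final Timer timer;
  private final long startNanos;

  public RateGauge() {
    this(new Timer());  // production path: real monotonic clock
  }

  // visible for testing: lets a unit test supply a FakeTimer
  RateGauge(Timer timer) {
    this.timer = timer;
    this.startNanos = timer.monotonicNowNanos();
  }

  /** @return nanoseconds elapsed since construction. */
  public long elapsedNanos() {
    return timer.monotonicNowNanos() - startNanos;
  }

  public static void main(String[] args) {
    FakeTimer fakeTimer = new FakeTimer();  // test utility class
    RateGauge gauge = new RateGauge(fakeTimer);
    fakeTimer.advanceNanos(42);
    System.out.println(gauge.elapsedNanos());  // prints 42
  }
}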
FakeTimer.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.util;
 
+import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -28,25 +29,38 @@
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class FakeTimer extends Timer {
-  private long nowMillis;
+  private long nowNanos;
 
   /** Constructs a FakeTimer with a non-zero value */
   public FakeTimer() {
-    nowMillis = 1000;  // Initialize with a non-trivial value.
+    nowNanos = 1000;  // Initialize with a non-trivial value.
   }
 
   @Override
   public long now() {
-    return nowMillis;
+    return TimeUnit.NANOSECONDS.toMillis(nowNanos);
   }
 
   @Override
   public long monotonicNow() {
-    return nowMillis;
+    return TimeUnit.NANOSECONDS.toMillis(nowNanos);
   }
 
+  @Override
+  public long monotonicNowNanos() {
+    return nowNanos;
+  }
+
   /** Increases the time by milliseconds */
   public void advance(long advMillis) {
-    nowMillis += advMillis;
+    nowNanos += TimeUnit.MILLISECONDS.toNanos(advMillis);
   }
+
+  /**
+   * Increases the time by nanoseconds.
+   * @param advNanos Nanoseconds to advance by.
+   */
+  public void advanceNanos(long advNanos) {
+    nowNanos += advNanos;
+  }
 }
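With a single nanosecond counter backing all three accessors, the millisecond and nanosecond views of the fake clock cannot drift apart. A short walk-through of the behaviour implied by the class above (a plain main method rather than a JUnit test, purely for illustration; expected values follow from the 1000 ns initial value shown in the diff):

import org.apache.hadoop.util.FakeTimer;

// Illustrative demo of the reworked FakeTimer: one nanosecond counter
// backs now(), monotonicNow() and monotonicNowNanos().
public class FakeTimerDemo {
  public static void main(String[] args) {
    FakeTimer timer = new FakeTimer();
    System.out.println(timer.monotonicNowNanos());  // 1000
    System.out.println(timer.monotonicNow());       // 0 (1000 ns < 1 ms)

    timer.advance(3);                               // +3 ms = +3,000,000 ns
    System.out.println(timer.monotonicNowNanos());  // 3001000
    System.out.println(timer.monotonicNow());       // 3

    timer.advanceNanos(500);                        // sub-millisecond step
    System.out.println(timer.monotonicNowNanos());  // 3001500
    System.out.println(timer.monotonicNow());       // still 3
  }
}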
TestLightWeightCache.java

@@ -213,7 +213,7 @@ private static class LightWeightCacheTestCase implements GSet<IntEntry, IntEntry
     int iterate_count = 0;
     int contain_count = 0;
 
-    private long currentTestTime = ran.nextInt();
+    private FakeTimer fakeTimer = new FakeTimer();
 
     LightWeightCacheTestCase(int tablelength, int sizeLimit,
         long creationExpirationPeriod, long accessExpirationPeriod,
@@ -230,12 +230,7 @@ private static class LightWeightCacheTestCase implements GSet<IntEntry, IntEntry
 
       data = new IntData(datasize, modulus);
       cache = new LightWeightCache<IntEntry, IntEntry>(tablelength, sizeLimit,
-          creationExpirationPeriod, 0, new LightWeightCache.Clock() {
-        @Override
-        long currentTime() {
-          return currentTestTime;
-        }
-      });
+          creationExpirationPeriod, 0, fakeTimer);
 
       Assert.assertEquals(0, cache.size());
     }
@@ -247,7 +242,7 @@ private boolean containsTest(IntEntry key) {
       } else {
         final IntEntry h = hashMap.remove(key);
         if (h != null) {
-          Assert.assertTrue(cache.isExpired(h, currentTestTime));
+          Assert.assertTrue(cache.isExpired(h, fakeTimer.monotonicNowNanos()));
         }
       }
       return c;
@@ -266,7 +261,7 @@ private IntEntry getTest(IntEntry key) {
       } else {
         final IntEntry h = hashMap.remove(key);
         if (h != null) {
-          Assert.assertTrue(cache.isExpired(h, currentTestTime));
+          Assert.assertTrue(cache.isExpired(h, fakeTimer.monotonicNowNanos()));
         }
       }
       return c;
@@ -286,7 +281,7 @@ private IntEntry putTest(IntEntry entry) {
       final IntEntry h = hashMap.put(entry);
       if (h != null && h != entry) {
         // if h == entry, its expiration time is already updated
-        Assert.assertTrue(cache.isExpired(h, currentTestTime));
+        Assert.assertTrue(cache.isExpired(h, fakeTimer.monotonicNowNanos()));
       }
     }
     return c;
@@ -305,7 +300,7 @@ private IntEntry removeTest(IntEntry key) {
       } else {
         final IntEntry h = hashMap.remove(key);
         if (h != null) {
-          Assert.assertTrue(cache.isExpired(h, currentTestTime));
+          Assert.assertTrue(cache.isExpired(h, fakeTimer.monotonicNowNanos()));
         }
       }
       return c;
@@ -339,7 +334,7 @@ boolean tossCoin() {
     }
 
     void check() {
-      currentTestTime += ran.nextInt() & 0x3;
+      fakeTimer.advanceNanos(ran.nextInt() & 0x3);
 
       //test size
       sizeTest();