From 8ad510bb93790fdb7ea8ff16aa6dcff44e4f3bea Mon Sep 17 00:00:00 2001
From: Jonathan Turner Eagles
Date: Tue, 22 Oct 2013 03:41:49 +0000
Subject: [PATCH] HADOOP-9291. enhance unit-test coverage of package
o.a.h.metrics2 (Ivan A. Veselovsky via jeagles)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1534474 13f79535-47bb-0310-9956-ffa450edef68
---
.../hadoop-common/CHANGES.txt | 3 +
.../filter/AbstractPatternFilter.java | 2 +-
.../apache/hadoop/metrics2/package-info.java | 2 +-
.../metrics2/filter/TestPatternFilter.java | 63 ++++++--
.../hadoop/metrics2/sink/TestFileSink.java | 138 ++++++++++++++++++
5 files changed, 196 insertions(+), 12 deletions(-)
create mode 100644 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 19e0c44196..204715631a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -366,6 +366,9 @@ Release 2.3.0 - UNRELEASED
HDFS-5276. FileSystem.Statistics should use thread-local counters to avoid
multi-threaded performance issues on read/write. (Colin Patrick McCabe)
+ HADOOP-9291. enhance unit-test coverage of package o.a.h.metrics2 (Ivan A.
+ Veselovsky via jeagles)
+
OPTIMIZATIONS
HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java
index 1f779735a5..07b50ab977 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java
@@ -112,7 +112,7 @@ public boolean accepts(MetricsTag tag) {
return false;
}
// Reject if no match in whitelist only mode
- if (ipat != null && epat == null) {
+ if (!includeTagPatterns.isEmpty() && excludeTagPatterns.isEmpty()) {
return false;
}
return true;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java
index 2f787d0449..be2149977c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java
@@ -234,7 +234,7 @@ identify a particular sink instance. The asterisk (*) can be
patterns.
Similarly, you can specify the record.filter and
- metrics.filter options, which operate at record and metric
+ metric.filter options, which operate at record and metric
level, respectively. Filters can be combined to optimize
the filtering efficiency.
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
index 2bdfdb978a..a8f38d6136 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
@@ -23,9 +23,11 @@
import org.apache.commons.configuration.SubsetConfiguration;
import org.junit.Test;
+
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
+import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.impl.ConfigBuilder;
@@ -53,7 +55,7 @@ public class TestPatternFilter {
.add("p.include.tags", "foo:f").subset("p");
shouldAccept(wl, "foo");
shouldAccept(wl, Arrays.asList(tag("bar", "", ""),
- tag("foo", "", "f")));
+ tag("foo", "", "f")), new boolean[] {false, true});
shouldAccept(wl, mockMetricsRecord("foo", Arrays.asList(
tag("bar", "", ""), tag("foo", "", "f"))));
shouldReject(wl, "bar");
@@ -78,7 +80,7 @@ public class TestPatternFilter {
tag("bar", "", ""))));
shouldReject(bl, "foo");
shouldReject(bl, Arrays.asList(tag("bar", "", ""),
- tag("foo", "", "f")));
+ tag("foo", "", "f")), new boolean[] {true, false});
shouldReject(bl, mockMetricsRecord("foo", Arrays.asList(
tag("bar", "", ""))));
shouldReject(bl, mockMetricsRecord("bar", Arrays.asList(
@@ -125,15 +127,61 @@ public class TestPatternFilter {
shouldAccept(c, mockMetricsRecord("foo", Arrays.asList(
tag("foo", "", "f"))));
}
-
+
static void shouldAccept(SubsetConfiguration conf, String s) {
assertTrue("accepts "+ s, newGlobFilter(conf).accepts(s));
assertTrue("accepts "+ s, newRegexFilter(conf).accepts(s));
}
+ // Version for one tag:
static void shouldAccept(SubsetConfiguration conf, List<MetricsTag> tags) {
- assertTrue("accepts "+ tags, newGlobFilter(conf).accepts(tags));
- assertTrue("accepts "+ tags, newRegexFilter(conf).accepts(tags));
+ shouldAcceptImpl(true, conf, tags, new boolean[] {true});
+ }
+ // Version for multiple tags:
+ static void shouldAccept(SubsetConfiguration conf, List<MetricsTag> tags,
+ boolean[] expectedAcceptedSpec) {
+ shouldAcceptImpl(true, conf, tags, expectedAcceptedSpec);
+ }
+
+ // Version for one tag:
+ static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags) {
+ shouldAcceptImpl(false, conf, tags, new boolean[] {false});
+ }
+ // Version for multiple tags:
+ static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags,
+ boolean[] expectedAcceptedSpec) {
+ shouldAcceptImpl(false, conf, tags, expectedAcceptedSpec);
+ }
+
+ private static void shouldAcceptImpl(final boolean expectAcceptList,
+ SubsetConfiguration conf, List<MetricsTag> tags, boolean[] expectedAcceptedSpec) {
+ final MetricsFilter globFilter = newGlobFilter(conf);
+ final MetricsFilter regexFilter = newRegexFilter(conf);
+
+ // Test acceptance of the tag list:
+ assertEquals("accepts "+ tags, expectAcceptList, globFilter.accepts(tags));
+ assertEquals("accepts "+ tags, expectAcceptList, regexFilter.accepts(tags));
+
+ // Test results on each of the individual tags:
+ int acceptedCount = 0;
+ for (int i = 0; i < tags.size(); i++) {
+ MetricsTag tag = tags.get(i);
+ boolean actGlob = globFilter.accepts(tag);
+ boolean actRegex = regexFilter.accepts(tag);
+ // The glob and regex filters should agree on each individual tag:
+ assertEquals("accepts "+ tag, actGlob, actRegex);
+ // Check the result against the expectation for this tag:
+ assertEquals("accepts "+ tag, expectedAcceptedSpec[i], actGlob);
+ if (actGlob) {
+ acceptedCount++;
+ }
+ }
+ if (expectAcceptList) {
+ // At least one individual tag should be accepted:
+ assertTrue("No tag of the following accepted: " + tags, acceptedCount > 0);
+ } else {
+ // At least one individual tag should be rejected:
+ assertTrue("No tag of the following rejected: " + tags, acceptedCount < tags.size());
+ }
}
/**
@@ -152,11 +200,6 @@ static void shouldReject(SubsetConfiguration conf, String s) {
assertTrue("rejects "+ s, !newRegexFilter(conf).accepts(s));
}
- static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags) {
- assertTrue("rejects "+ tags, !newGlobFilter(conf).accepts(tags));
- assertTrue("rejects "+ tags, !newRegexFilter(conf).accepts(tags));
- }
-
/**
* Asserts that filters with the given configuration reject the given record.
*
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
new file mode 100644
index 0000000000..8c918b8431
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.annotation.Metric.Type;
+import org.apache.hadoop.metrics2.impl.ConfigBuilder;
+import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
+import org.apache.hadoop.metrics2.impl.TestMetricsConfig;
+import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
+import org.junit.After;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestFileSink {
+
+ private File outFile;
+
+ // The 2 sample metric classes:
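+ // NB: a String-returning method annotated with @Metric(type=Type.TAG) is exposed
+ // as a record tag, while an annotated MutableGaugeInt field is exposed as an
+ // integer gauge; always=true makes the gauge be snapshotted even if unchanged.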
+ @Metrics(name="testRecord1", context="test1")
+ static class MyMetrics1 {
+ @Metric(value={"testTag1", ""}, type=Type.TAG)
+ String testTag1() { return "testTagValue1"; }
+
+ @Metric(value={"testTag2", ""}, type=Type.TAG)
+ String gettestTag2() { return "testTagValue2"; }
+
+ @Metric(value={"testMetric1", "An integer gauge"},always=true)
+ MutableGaugeInt testMetric1;
+
+ @Metric(value={"testMetric2", "An integer gauge"},always=true)
+ MutableGaugeInt testMetric2;
+
+ public MyMetrics1 registerWith(MetricsSystem ms) {
+ return ms.register("m1", null, this);
+ }
+ }
+
+ @Metrics(name="testRecord2", context="test1")
+ static class MyMetrics2 {
+ @Metric(value={"testTag22", ""}, type=Type.TAG)
+ String testTag1() { return "testTagValue22"; }
+
+ public MyMetrics2 registerWith(MetricsSystem ms) {
+ return ms.register("m2", null, this);
+ }
+ }
+
+ private File getTestTempFile(String prefix, String suffix) throws IOException {
+ String tmpPath = System.getProperty("java.io.tmpdir", "/tmp");
+ String user = System.getProperty("user.name", "unknown-user");
+ File dir = new File(tmpPath + "/" + user);
+ dir.mkdirs();
+ return File.createTempFile(prefix, suffix, dir);
+ }
+
+ @Test(timeout=6000)
+ public void testFileSink() throws IOException {
+ outFile = getTestTempFile("test-file-sink-", ".out");
+ final String outPath = outFile.getAbsolutePath();
+
+ // NB: specify large period to avoid multiple metrics snapshotting:
+ new ConfigBuilder().add("*.period", 10000)
+ .add("test.sink.mysink0.class", FileSink.class.getName())
+ .add("test.sink.mysink0.filename", outPath)
+ // NB: we filter by context to exclude "metricssystem" context metrics:
+ .add("test.sink.mysink0.context", "test1")
+ .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
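+ // NB: the "test" prefix passed to MetricsSystemImpl below selects the
+ // "test.*" keys from the configuration file saved above.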
+ MetricsSystemImpl ms = new MetricsSystemImpl("test");
+ ms.start();
+
+ final MyMetrics1 mm1
+ = new MyMetrics1().registerWith(ms);
+ new MyMetrics2().registerWith(ms);
+
+ mm1.testMetric1.incr();
+ mm1.testMetric2.incr(2);
+
+ ms.publishMetricsNow(); // publish the metrics
+ ms.stop();
+ ms.shutdown();
+
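+ // Read the sink output back into a string; the final 'true' argument makes
+ // IOUtils.copyBytes close both streams once the copy completes.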
+ InputStream is = new FileInputStream(outFile);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream((int)outFile.length());
+ IOUtils.copyBytes(is, baos, 1024, true);
+ String outFileContent = new String(baos.toByteArray(), "UTF-8");
+
+ // Check the out file content. Should be something like the following:
+ //1360244820087 test1.testRecord1: Context=test1, testTag1=testTagValue1, testTag2=testTagValue2, Hostname=myhost, testMetric1=1, testMetric2=2
+ //1360244820089 test1.testRecord2: Context=test1, testTag22=testTagValue22, Hostname=myhost
+
+ // Note that in the below expression we allow tags and metrics to go in arbitrary order.
+ Pattern expectedContentPattern = Pattern.compile(
+ // line #1:
+ "^\\d+\\s+test1.testRecord1:\\s+Context=test1,\\s+" +
+ "(testTag1=testTagValue1,\\s+testTag2=testTagValue2|testTag2=testTagValue2,\\s+testTag1=testTagValue1)," +
+ "\\s+Hostname=.*,\\s+(testMetric1=1,\\s+testMetric2=2|testMetric2=2,\\s+testMetric1=1)" +
+ // line #2:
+ "$[\\n\\r]*^\\d+\\s+test1.testRecord2:\\s+Context=test1," +
+ "\\s+testTag22=testTagValue22,\\s+Hostname=.*$[\\n\\r]*",
+ Pattern.MULTILINE);
+ assertTrue(expectedContentPattern.matcher(outFileContent).matches());
+ }
+
+ @After
+ public void after() {
+ if (outFile != null) {
+ outFile.delete();
+ assertTrue(!outFile.exists());
+ }
+ }
+}