HADOOP-9291. enhance unit-test coverage of package o.a.h.metrics2 (Ivan A. Veselovsky via jeagles)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1534474 13f79535-47bb-0310-9956-ffa450edef68
parent df87ed34f2
commit 8ad510bb93
@@ -366,6 +366,9 @@ Release 2.3.0 - UNRELEASED
     HDFS-5276. FileSystem.Statistics should use thread-local counters to avoid
     multi-threaded performance issues on read/write. (Colin Patrick McCabe)
 
+    HADOOP-9291. enhance unit-test coverage of package o.a.h.metrics2 (Ivan A.
+    Veselovsky via jeagles)
+
   OPTIMIZATIONS
 
     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -112,7 +112,7 @@ public boolean accepts(MetricsTag tag) {
       return false;
     }
     // Reject if no match in whitelist only mode
-    if (ipat != null && epat == null) {
+    if (!includeTagPatterns.isEmpty() && excludeTagPatterns.isEmpty()) {
       return false;
     }
     return true;
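A note on the hunk above: "whitelist only mode" means the filter was configured with include patterns but no exclude patterns. A minimal glob-filter configuration of that kind, reusing the illustrative "p" prefix from TestPatternFilter below, is just:

  p.include.tags=foo:f

With such a configuration the old per-tag check (ipat != null && epat == null) still accepted a tag whose name had no include pattern at all, whereas the new check on the pattern maps rejects any tag that did not already match an include pattern; the boolean[] expectations added to the tests below encode that behaviour.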
@@ -234,7 +234,7 @@ identify a particular sink instance. The asterisk (<code>*</code>) can be
   patterns.
   </p>
   <p>Similarly, you can specify the <code>record.filter</code> and
-  <code>metrics.filter</code> options, which operate at record and metric
+  <code>metric.filter</code> options, which operate at record and metric
   level, respectively. Filters can be combined to optimize
   the filtering efficiency.</p>
 
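For illustration only (the "test" prefix, sink name "file0", and pattern values below are made up; the record.filter / metric.filter option names come from the documentation text above, and GlobFilter is the glob-based pattern filter exercised by the tests in this commit), a sink-level filter might be configured in hadoop-metrics2.properties along these lines:

  test.sink.file0.class=org.apache.hadoop.metrics2.sink.FileSink
  test.sink.file0.record.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
  test.sink.file0.record.filter.include=testRecord*
  test.sink.file0.metric.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
  test.sink.file0.metric.filter.exclude=testMetric2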
@@ -23,9 +23,11 @@
 
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
@@ -53,7 +55,7 @@ public class TestPatternFilter {
         .add("p.include.tags", "foo:f").subset("p");
     shouldAccept(wl, "foo");
     shouldAccept(wl, Arrays.asList(tag("bar", "", ""),
-      tag("foo", "", "f")));
+      tag("foo", "", "f")), new boolean[] {false, true});
     shouldAccept(wl, mockMetricsRecord("foo", Arrays.asList(
         tag("bar", "", ""), tag("foo", "", "f"))));
     shouldReject(wl, "bar");
@@ -78,7 +80,7 @@ public class TestPatternFilter {
         tag("bar", "", ""))));
     shouldReject(bl, "foo");
     shouldReject(bl, Arrays.asList(tag("bar", "", ""),
-      tag("foo", "", "f")));
+      tag("foo", "", "f")), new boolean[] {true, false});
     shouldReject(bl, mockMetricsRecord("foo", Arrays.asList(
         tag("bar", "", ""))));
     shouldReject(bl, mockMetricsRecord("bar", Arrays.asList(
@@ -131,9 +133,55 @@ static void shouldAccept(SubsetConfiguration conf, String s) {
     assertTrue("accepts "+ s, newRegexFilter(conf).accepts(s));
   }
 
+  // Version for one tag:
   static void shouldAccept(SubsetConfiguration conf, List<MetricsTag> tags) {
-    assertTrue("accepts "+ tags, newGlobFilter(conf).accepts(tags));
-    assertTrue("accepts "+ tags, newRegexFilter(conf).accepts(tags));
+    shouldAcceptImpl(true, conf, tags, new boolean[] {true});
+  }
+  // Version for multiple tags:
+  static void shouldAccept(SubsetConfiguration conf, List<MetricsTag> tags,
+      boolean[] expectedAcceptedSpec) {
+    shouldAcceptImpl(true, conf, tags, expectedAcceptedSpec);
+  }
+
+  // Version for one tag:
+  static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags) {
+    shouldAcceptImpl(false, conf, tags, new boolean[] {false});
+  }
+  // Version for multiple tags:
+  static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags,
+      boolean[] expectedAcceptedSpec) {
+    shouldAcceptImpl(false, conf, tags, expectedAcceptedSpec);
+  }
+
+  private static void shouldAcceptImpl(final boolean expectAcceptList,
+      SubsetConfiguration conf, List<MetricsTag> tags, boolean[] expectedAcceptedSpec) {
+    final MetricsFilter globFilter = newGlobFilter(conf);
+    final MetricsFilter regexFilter = newRegexFilter(conf);
+
+    // Test acceptance of the tag list:
+    assertEquals("accepts "+ tags, expectAcceptList, globFilter.accepts(tags));
+    assertEquals("accepts "+ tags, expectAcceptList, regexFilter.accepts(tags));
+
+    // Test results on each of the individual tags:
+    int acceptedCount = 0;
+    for (int i=0; i<tags.size(); i++) {
+      MetricsTag tag = tags.get(i);
+      boolean actGlob = globFilter.accepts(tag);
+      boolean actRegex = regexFilter.accepts(tag);
+      assertEquals("accepts "+tag, expectedAcceptedSpec[i], actGlob);
+      // Both the filters should give the same result:
+      assertEquals(actGlob, actRegex);
+      if (actGlob) {
+        acceptedCount++;
+      }
+    }
+    if (expectAcceptList) {
+      // At least one individual tag should be accepted:
+      assertTrue("No tag of the following accepted: " + tags, acceptedCount > 0);
+    } else {
+      // At least one individual tag should be rejected:
+      assertTrue("No tag of the following rejected: " + tags, acceptedCount < tags.size());
+    }
   }
 
   /**
@@ -152,11 +200,6 @@ static void shouldReject(SubsetConfiguration conf, String s) {
     assertTrue("rejects "+ s, !newRegexFilter(conf).accepts(s));
   }
 
-  static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags) {
-    assertTrue("rejects "+ tags, !newGlobFilter(conf).accepts(tags));
-    assertTrue("rejects "+ tags, !newRegexFilter(conf).accepts(tags));
-  }
-
   /**
    * Asserts that filters with the given configuration reject the given record.
    *
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.annotation.Metric.Type;
+import org.apache.hadoop.metrics2.impl.ConfigBuilder;
+import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
+import org.apache.hadoop.metrics2.impl.TestMetricsConfig;
+import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
+import org.junit.After;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestFileSink {
+
+  private File outFile;
+
+  // The 2 sample metric classes:
+  @Metrics(name="testRecord1", context="test1")
+  static class MyMetrics1 {
+    @Metric(value={"testTag1", ""}, type=Type.TAG)
+    String testTag1() { return "testTagValue1"; }
+
+    @Metric(value={"testTag2", ""}, type=Type.TAG)
+    String gettestTag2() { return "testTagValue2"; }
+
+    @Metric(value={"testMetric1", "An integer gauge"},always=true)
+    MutableGaugeInt testMetric1;
+
+    @Metric(value={"testMetric2", "An integer gauge"},always=true)
+    MutableGaugeInt testMetric2;
+
+    public MyMetrics1 registerWith(MetricsSystem ms) {
+      return ms.register("m1", null, this);
+    }
+  }
+
+  @Metrics(name="testRecord2", context="test1")
+  static class MyMetrics2 {
+    @Metric(value={"testTag22", ""}, type=Type.TAG)
+    String testTag1() { return "testTagValue22"; }
+
+    public MyMetrics2 registerWith(MetricsSystem ms) {
+      return ms.register("m2", null, this);
+    }
+  }
+
+  private File getTestTempFile(String prefix, String suffix) throws IOException {
+    String tmpPath = System.getProperty("java.io.tmpdir", "/tmp");
+    String user = System.getProperty("user.name", "unknown-user");
+    File dir = new File(tmpPath + "/" + user);
+    dir.mkdirs();
+    return File.createTempFile(prefix, suffix, dir);
+  }
+
+  @Test(timeout=6000)
+  public void testFileSink() throws IOException {
+    outFile = getTestTempFile("test-file-sink-", ".out");
+    final String outPath = outFile.getAbsolutePath();
+
+    // NB: specify large period to avoid multiple metrics snapshotting:
+    new ConfigBuilder().add("*.period", 10000)
+        .add("test.sink.mysink0.class", FileSink.class.getName())
+        .add("test.sink.mysink0.filename", outPath)
+        // NB: we filter by context to exclude "metricssystem" context metrics:
+        .add("test.sink.mysink0.context", "test1")
+        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
+    MetricsSystemImpl ms = new MetricsSystemImpl("test");
+    ms.start();
+
+    final MyMetrics1 mm1
+      = new MyMetrics1().registerWith(ms);
+    new MyMetrics2().registerWith(ms);
+
+    mm1.testMetric1.incr();
+    mm1.testMetric2.incr(2);
+
+    ms.publishMetricsNow(); // publish the metrics
+    ms.stop();
+    ms.shutdown();
+
+    InputStream is = new FileInputStream(outFile);
+    ByteArrayOutputStream baos = new ByteArrayOutputStream((int)outFile.length());
+    IOUtils.copyBytes(is, baos, 1024, true);
+    String outFileContent = new String(baos.toByteArray(), "UTF-8");
+
+    // Check the out file content. Should be something like the following:
+    //1360244820087 test1.testRecord1: Context=test1, testTag1=testTagValue1, testTag2=testTagValue2, Hostname=myhost, testMetric1=1, testMetric2=2
+    //1360244820089 test1.testRecord2: Context=test1, testTag22=testTagValue22, Hostname=myhost
+
+    // Note that in the below expression we allow tags and metrics to go in arbitrary order.
+    Pattern expectedContentPattern = Pattern.compile(
+        // line #1:
+        "^\\d+\\s+test1.testRecord1:\\s+Context=test1,\\s+" +
+        "(testTag1=testTagValue1,\\s+testTag2=testTagValue2|testTag2=testTagValue2,\\s+testTag1=testTagValue1)," +
+        "\\s+Hostname=.*,\\s+(testMetric1=1,\\s+testMetric2=2|testMetric2=2,\\s+testMetric1=1)" +
+        // line #2:
+        "$[\\n\\r]*^\\d+\\s+test1.testRecord2:\\s+Context=test1," +
+        "\\s+testTag22=testTagValue22,\\s+Hostname=.*$[\\n\\r]*",
+        Pattern.MULTILINE);
+    assertTrue(expectedContentPattern.matcher(outFileContent).matches());
+  }
+
+  @After
+  public void after() {
+    if (outFile != null) {
+      outFile.delete();
+      assertTrue(!outFile.exists());
+    }
+  }
+}
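For reference, the ConfigBuilder calls in testFileSink() above correspond to a hadoop-metrics2 properties file roughly like the following (the filename value is whatever temporary file the test creates, and "mysink0" is just the sink instance name the test chooses):

  *.period=10000
  test.sink.mysink0.class=org.apache.hadoop.metrics2.sink.FileSink
  test.sink.mysink0.filename=/tmp/<user>/test-file-sink-<random>.out
  test.sink.mysink0.context=test1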