YARN-11260. Upgrade JUnit from 4 to 5 in hadoop-yarn-server-timelineservice (#4775)

Co-authored-by: Ashutosh Gupta <ashugpt@amazon.com>
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
Authored by Ashutosh Gupta on 2022-10-09 16:28:03 +01:00, committed by GitHub
parent b0bfd09c41
commit 9a7d0e7ed0
18 changed files with 711 additions and 622 deletions
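
The per-file hunks that follow all apply the same JUnit 4 to JUnit 5 (Jupiter) migration pattern. As a summary, here is a minimal sketch of that pattern; the test class below is hypothetical and only illustrates the annotation and import changes, it is not part of the patch:

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

public class TestMigrationSketch {

  @BeforeAll                // was @BeforeClass (org.junit.BeforeClass) in JUnit 4
  public static void setupClass() {
  }

  @BeforeEach               // was @Before (org.junit.Before) in JUnit 4
  public void setup() {
  }

  @Test                     // org.junit.Test becomes org.junit.jupiter.api.Test
  void testSomething() {    // Jupiter test methods no longer need to be public
    // static asserts move from org.junit.Assert to org.junit.jupiter.api.Assertions
    assertEquals(2, 1 + 1);
  }

  @AfterEach                // was @After in JUnit 4
  public void tearDown() {
  }

  @AfterAll                 // was @AfterClass in JUnit 4
  public static void tearDownClass() {
  }
}

The pom.xml hunk below makes the corresponding dependency swap, replacing junit:junit with junit-jupiter-api, junit-jupiter-engine, and junit-platform-launcher so the Maven Surefire JUnit Platform provider can discover and run the migrated tests.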

View File

@ -125,17 +125,26 @@
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.assertj</groupId> <groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId> <artifactId>assertj-core</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-launcher</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.mockito</groupId> <groupId>org.mockito</groupId>

View File

@ -19,17 +19,6 @@
package org.apache.hadoop.yarn.server.timelineservice.collector; package org.apache.hadoop.yarn.server.timelineservice.collector;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -38,6 +27,11 @@
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -47,14 +41,22 @@
import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextResponse;
import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
import org.junit.After;
import org.junit.Before; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.Test; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
public class TestNMTimelineCollectorManager { public class TestNMTimelineCollectorManager {
private NodeTimelineCollectorManager collectorManager; private NodeTimelineCollectorManager collectorManager;
@Before @BeforeEach
public void setup() throws Exception { public void setup() throws Exception {
collectorManager = createCollectorManager(); collectorManager = createCollectorManager();
Configuration conf = new YarnConfiguration(); Configuration conf = new YarnConfiguration();
@ -66,7 +68,7 @@ public void setup() throws Exception {
collectorManager.start(); collectorManager.start();
} }
@After @AfterEach
public void tearDown() throws Exception { public void tearDown() throws Exception {
if (collectorManager != null) { if (collectorManager != null) {
collectorManager.stop(); collectorManager.stop();
@ -74,12 +76,12 @@ public void tearDown() throws Exception {
} }
@Test @Test
public void testStartingWriterFlusher() throws Exception { void testStartingWriterFlusher() throws Exception {
assertTrue(collectorManager.writerFlusherRunning()); assertTrue(collectorManager.writerFlusherRunning());
} }
@Test @Test
public void testStartWebApp() throws Exception { void testStartWebApp() throws Exception {
assertNotNull(collectorManager.getRestServerBindAddress()); assertNotNull(collectorManager.getRestServerBindAddress());
String address = collectorManager.getRestServerBindAddress(); String address = collectorManager.getRestServerBindAddress();
String[] parts = address.split(":"); String[] parts = address.split(":");
@ -89,8 +91,9 @@ public void testStartWebApp() throws Exception {
Integer.valueOf(parts[1]) <= 30100); Integer.valueOf(parts[1]) <= 30100);
} }
@Test(timeout=60000) @Test
public void testMultithreadedAdd() throws Exception { @Timeout(60000)
void testMultithreadedAdd() throws Exception {
final int numApps = 5; final int numApps = 5;
List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>(); List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
for (int i = 0; i < numApps; i++) { for (int i = 0; i < numApps; i++) {
@ -107,7 +110,7 @@ public Boolean call() {
ExecutorService executor = Executors.newFixedThreadPool(numApps); ExecutorService executor = Executors.newFixedThreadPool(numApps);
try { try {
List<Future<Boolean>> futures = executor.invokeAll(tasks); List<Future<Boolean>> futures = executor.invokeAll(tasks);
for (Future<Boolean> future: futures) { for (Future<Boolean> future : futures) {
assertTrue(future.get()); assertTrue(future.get());
} }
} finally { } finally {
@ -121,7 +124,7 @@ public Boolean call() {
} }
@Test @Test
public void testMultithreadedAddAndRemove() throws Exception { void testMultithreadedAddAndRemove() throws Exception {
final int numApps = 5; final int numApps = 5;
List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>(); List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
for (int i = 0; i < numApps; i++) { for (int i = 0; i < numApps; i++) {
@ -140,7 +143,7 @@ public Boolean call() {
ExecutorService executor = Executors.newFixedThreadPool(numApps); ExecutorService executor = Executors.newFixedThreadPool(numApps);
try { try {
List<Future<Boolean>> futures = executor.invokeAll(tasks); List<Future<Boolean>> futures = executor.invokeAll(tasks);
for (Future<Boolean> future: futures) { for (Future<Boolean> future : futures) {
assertTrue(future.get()); assertTrue(future.get());
} }
} finally { } finally {

View File

@ -18,11 +18,14 @@
package org.apache.hadoop.yarn.server.timelineservice.collector; package org.apache.hadoop.yarn.server.timelineservice.collector;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.yarn.server.timelineservice.metrics.PerNodeAggTimelineCollectorMetrics; import org.apache.hadoop.yarn.server.timelineservice.metrics.PerNodeAggTimelineCollectorMetrics;
import org.junit.After;
import org.junit.Assert; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.Before; import static org.junit.jupiter.api.Assertions.assertNotNull;
import org.junit.Test;
/** /**
* Test PerNodeAggTimelineCollectorMetrics. * Test PerNodeAggTimelineCollectorMetrics.
@ -32,24 +35,24 @@ public class TestPerNodeAggTimelineCollectorMetrics {
private PerNodeAggTimelineCollectorMetrics metrics; private PerNodeAggTimelineCollectorMetrics metrics;
@Test @Test
public void testTimelineCollectorMetrics() { void testTimelineCollectorMetrics() {
Assert.assertNotNull(metrics); assertNotNull(metrics);
Assert.assertEquals(10, assertEquals(10,
metrics.getPutEntitiesSuccessLatency().getInterval()); metrics.getPutEntitiesSuccessLatency().getInterval());
Assert.assertEquals(10, assertEquals(10,
metrics.getPutEntitiesFailureLatency().getInterval()); metrics.getPutEntitiesFailureLatency().getInterval());
Assert.assertEquals(10, assertEquals(10,
metrics.getAsyncPutEntitiesSuccessLatency().getInterval()); metrics.getAsyncPutEntitiesSuccessLatency().getInterval());
Assert.assertEquals(10, assertEquals(10,
metrics.getAsyncPutEntitiesFailureLatency().getInterval()); metrics.getAsyncPutEntitiesFailureLatency().getInterval());
} }
@Before @BeforeEach
public void setup() { public void setup() {
metrics = PerNodeAggTimelineCollectorMetrics.getInstance(); metrics = PerNodeAggTimelineCollectorMetrics.getInstance();
} }
@After @AfterEach
public void tearDown() { public void tearDown() {
PerNodeAggTimelineCollectorMetrics.destroy(); PerNodeAggTimelineCollectorMetrics.destroy();
} }

View File

@ -18,19 +18,13 @@
package org.apache.hadoop.yarn.server.timelineservice.collector; package org.apache.hadoop.yarn.server.timelineservice.collector;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
@ -47,9 +41,16 @@
import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextResponse;
import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
import org.junit.After;
import org.junit.Assert; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.Test; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
public class TestPerNodeTimelineCollectorsAuxService { public class TestPerNodeTimelineCollectorsAuxService {
private ApplicationAttemptId appAttemptId; private ApplicationAttemptId appAttemptId;
@ -70,7 +71,7 @@ public TestPerNodeTimelineCollectorsAuxService() {
1000L); 1000L);
} }
@After @AfterEach
public void tearDown() throws Shell.ExitCodeException { public void tearDown() throws Shell.ExitCodeException {
if (auxService != null) { if (auxService != null) {
auxService.stop(); auxService.stop();
@ -78,7 +79,7 @@ public void tearDown() throws Shell.ExitCodeException {
} }
@Test @Test
public void testAddApplication() throws Exception { void testAddApplication() throws Exception {
auxService = createCollectorAndAddApplication(); auxService = createCollectorAndAddApplication();
// auxService should have a single app // auxService should have a single app
assertTrue(auxService.hasApplication(appAttemptId.getApplicationId())); assertTrue(auxService.hasApplication(appAttemptId.getApplicationId()));
@ -86,7 +87,7 @@ public void testAddApplication() throws Exception {
} }
@Test @Test
public void testAddApplicationNonAMContainer() throws Exception { void testAddApplicationNonAMContainer() throws Exception {
auxService = createCollector(); auxService = createCollector();
ContainerId containerId = getContainerId(2L); // not an AM ContainerId containerId = getContainerId(2L); // not an AM
@ -99,7 +100,7 @@ public void testAddApplicationNonAMContainer() throws Exception {
} }
@Test @Test
public void testRemoveApplication() throws Exception { void testRemoveApplication() throws Exception {
auxService = createCollectorAndAddApplication(); auxService = createCollectorAndAddApplication();
// auxService should have a single app // auxService should have a single app
assertTrue(auxService.hasApplication(appAttemptId.getApplicationId())); assertTrue(auxService.hasApplication(appAttemptId.getApplicationId()));
@ -118,7 +119,7 @@ public void testRemoveApplication() throws Exception {
} }
@Test @Test
public void testRemoveApplicationNonAMContainer() throws Exception { void testRemoveApplicationNonAMContainer() throws Exception {
auxService = createCollectorAndAddApplication(); auxService = createCollectorAndAddApplication();
// auxService should have a single app // auxService should have a single app
assertTrue(auxService.hasApplication(appAttemptId.getApplicationId())); assertTrue(auxService.hasApplication(appAttemptId.getApplicationId()));
@ -133,8 +134,9 @@ public void testRemoveApplicationNonAMContainer() throws Exception {
auxService.close(); auxService.close();
} }
@Test(timeout = 60000) @Test
public void testLaunch() throws Exception { @Timeout(60000)
void testLaunch() throws Exception {
ExitUtil.disableSystemExit(); ExitUtil.disableSystemExit();
try { try {
auxService = auxService =
@ -192,7 +194,7 @@ protected Future removeApplicationCollector(ContainerId containerId) {
try { try {
future.get(); future.get();
} catch (Exception e) { } catch (Exception e) {
Assert.fail("Expeption thrown while removing collector"); fail("Expeption thrown while removing collector");
} }
return future; return future;
} }
@ -228,8 +230,9 @@ private ContainerId getContainerId(long id) {
return ContainerId.newContainerId(appAttemptId, id); return ContainerId.newContainerId(appAttemptId, id);
} }
@Test(timeout = 60000) @Test
public void testRemoveAppWhenSecondAttemptAMCotainerIsLaunchedSameNode() @Timeout(60000)
void testRemoveAppWhenSecondAttemptAMCotainerIsLaunchedSameNode()
throws Exception { throws Exception {
// add first attempt collector // add first attempt collector
auxService = createCollectorAndAddApplication(); auxService = createCollectorAndAddApplication();
@ -241,25 +244,25 @@ public void testRemoveAppWhenSecondAttemptAMCotainerIsLaunchedSameNode()
createContainerInitalizationContext(2); createContainerInitalizationContext(2);
auxService.initializeContainer(containerInitalizationContext); auxService.initializeContainer(containerInitalizationContext);
assertTrue("Applicatin not found in collectors.", assertTrue(auxService.hasApplication(appAttemptId.getApplicationId()),
auxService.hasApplication(appAttemptId.getApplicationId())); "Applicatin not found in collectors.");
// first attempt stop container // first attempt stop container
ContainerTerminationContext context = createContainerTerminationContext(1); ContainerTerminationContext context = createContainerTerminationContext(1);
auxService.stopContainer(context); auxService.stopContainer(context);
// 2nd attempt container removed, still collector should hold application id // 2nd attempt container removed, still collector should hold application id
assertTrue("collector has removed application though 2nd attempt" assertTrue(auxService.hasApplication(appAttemptId.getApplicationId()),
+ " is running this node", "collector has removed application though 2nd attempt"
auxService.hasApplication(appAttemptId.getApplicationId())); + " is running this node");
// second attempt stop container // second attempt stop container
context = createContainerTerminationContext(2); context = createContainerTerminationContext(2);
auxService.stopContainer(context); auxService.stopContainer(context);
// auxService should not have that app // auxService should not have that app
assertFalse("Application is not removed from collector", assertFalse(auxService.hasApplication(appAttemptId.getApplicationId()),
auxService.hasApplication(appAttemptId.getApplicationId())); "Application is not removed from collector");
auxService.close(); auxService.close();
} }
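
Many of the one-line assertion changes in this file come from the argument order flipping between the two APIs: org.junit.Assert takes the failure message as the first parameter, while org.junit.jupiter.api.Assertions takes it as the last one. A minimal hedged sketch (class and variable names are illustrative only):

import static org.junit.jupiter.api.Assertions.assertTrue;

class AssertionMessageOrderSketch {

  void check(boolean applicationFound) {
    // JUnit 4 (org.junit.Assert) put the failure message first:
    //   assertTrue("Application not found in collectors.", applicationFound);
    // JUnit 5 (org.junit.jupiter.api.Assertions) takes the message as the last argument:
    assertTrue(applicationFound, "Application not found in collectors.");
  }
}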

View File

@ -18,35 +18,35 @@
package org.apache.hadoop.yarn.server.timelineservice.collector; package org.apache.hadoop.yarn.server.timelineservice.collector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineDomain;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetricOperation;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineWriteResponse;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector.AggregationStatusTable;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
import org.junit.Test;
import org.mockito.internal.stubbing.answers.AnswersWithDelay;
import org.mockito.internal.stubbing.answers.Returns;
import java.io.IOException; import java.io.IOException;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.junit.jupiter.api.Test;
import org.mockito.internal.stubbing.answers.AnswersWithDelay;
import org.mockito.internal.stubbing.answers.Returns;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineDomain;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetricOperation;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineWriteResponse;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector.AggregationStatusTable;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.Assert.fail; import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never; import static org.mockito.Mockito.never;
@ -104,7 +104,7 @@ private TimelineEntities generateTestEntities(int groups, int entities) {
} }
@Test @Test
public void testAggregation() throws Exception { void testAggregation() throws Exception {
// Test aggregation with multiple groups. // Test aggregation with multiple groups.
int groups = 3; int groups = 3;
int n = 50; int n = 50;
@ -154,7 +154,7 @@ public void testAggregation() throws Exception {
* putEntity() calls. * putEntity() calls.
*/ */
@Test @Test
public void testPutEntity() throws IOException { void testPutEntity() throws IOException {
TimelineWriter writer = mock(TimelineWriter.class); TimelineWriter writer = mock(TimelineWriter.class);
TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth. TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth.
TimelineHealthStatus.RUNNING, ""); TimelineHealthStatus.RUNNING, "");
@ -163,7 +163,7 @@ public void testPutEntity() throws IOException {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 5); conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 5);
conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS, conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS,
500L); 500L);
TimelineCollector collector = new TimelineCollectorForTest(writer); TimelineCollector collector = new TimelineCollectorForTest(writer);
collector.init(conf); collector.init(conf);
@ -179,7 +179,7 @@ public void testPutEntity() throws IOException {
@Test @Test
public void testPutEntityWithStorageDown() throws IOException { void testPutEntityWithStorageDown() throws IOException {
TimelineWriter writer = mock(TimelineWriter.class); TimelineWriter writer = mock(TimelineWriter.class);
TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth. TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth.
TimelineHealthStatus.CONNECTION_FAILURE, ""); TimelineHealthStatus.CONNECTION_FAILURE, "");
@ -188,7 +188,7 @@ public void testPutEntityWithStorageDown() throws IOException {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 5); conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 5);
conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS, conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS,
500L); 500L);
TimelineCollector collector = new TimelineCollectorForTest(writer); TimelineCollector collector = new TimelineCollectorForTest(writer);
collector.init(conf); collector.init(conf);
@ -203,8 +203,8 @@ public void testPutEntityWithStorageDown() throws IOException {
exceptionCaught = true; exceptionCaught = true;
} }
} }
assertTrue("TimelineCollector putEntity failed to " + assertTrue(exceptionCaught, "TimelineCollector putEntity failed to " +
"handle storage down", exceptionCaught); "handle storage down");
} }
/** /**
@ -212,7 +212,7 @@ public void testPutEntityWithStorageDown() throws IOException {
* putEntityAsync() calls. * putEntityAsync() calls.
*/ */
@Test @Test
public void testPutEntityAsync() throws Exception { void testPutEntityAsync() throws Exception {
TimelineWriter writer = mock(TimelineWriter.class); TimelineWriter writer = mock(TimelineWriter.class);
TimelineCollector collector = new TimelineCollectorForTest(writer); TimelineCollector collector = new TimelineCollectorForTest(writer);
collector.init(new Configuration()); collector.init(new Configuration());
@ -232,7 +232,7 @@ public void testPutEntityAsync() throws Exception {
* write is taking too much time. * write is taking too much time.
*/ */
@Test @Test
public void testAsyncEntityDiscard() throws Exception { void testAsyncEntityDiscard() throws Exception {
TimelineWriter writer = mock(TimelineWriter.class); TimelineWriter writer = mock(TimelineWriter.class);
when(writer.write(any(), any(), any())).thenAnswer( when(writer.write(any(), any(), any())).thenAnswer(
@ -261,7 +261,8 @@ public void testAsyncEntityDiscard() throws Exception {
* Test TimelineCollector's interaction with TimelineWriter upon * Test TimelineCollector's interaction with TimelineWriter upon
* putDomain() calls. * putDomain() calls.
*/ */
@Test public void testPutDomain() throws IOException { @Test
void testPutDomain() throws IOException {
TimelineWriter writer = mock(TimelineWriter.class); TimelineWriter writer = mock(TimelineWriter.class);
TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth. TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth.
TimelineHealthStatus.RUNNING, ""); TimelineHealthStatus.RUNNING, "");
@ -329,32 +330,32 @@ private static TimelineMetric createDummyMetric(long ts, Long value) {
} }
@Test @Test
public void testClearPreviousEntitiesOnAggregation() throws Exception { void testClearPreviousEntitiesOnAggregation() throws Exception {
final long ts = System.currentTimeMillis(); final long ts = System.currentTimeMillis();
TimelineCollector collector = new TimelineCollector("") { TimelineCollector collector = new TimelineCollector("") {
@Override @Override
public TimelineCollectorContext getTimelineEntityContext() { public TimelineCollectorContext getTimelineEntityContext() {
return new TimelineCollectorContext("cluster", "user", "flow", "1", return new TimelineCollectorContext("cluster", "user", "flow", "1",
1L, ApplicationId.newInstance(ts, 1).toString()); 1L, ApplicationId.newInstance(ts, 1).toString());
} }
}; };
TimelineWriter writer = mock(TimelineWriter.class); TimelineWriter writer = mock(TimelineWriter.class);
TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth. TimelineHealth timelineHealth = new TimelineHealth(TimelineHealth.
TimelineHealthStatus.RUNNING, ""); TimelineHealthStatus.RUNNING, "");
when(writer.getHealthStatus()).thenReturn(timelineHealth); when(writer.getHealthStatus()).thenReturn(timelineHealth);
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 5); conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 5);
conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS, conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS,
500L); 500L);
collector.init(conf); collector.init(conf);
collector.setWriter(writer); collector.setWriter(writer);
// Put 5 entities with different metric values. // Put 5 entities with different metric values.
TimelineEntities entities = new TimelineEntities(); TimelineEntities entities = new TimelineEntities();
for (int i = 1; i <=5; i++) { for (int i = 1; i <= 5; i++) {
TimelineEntity entity = createEntity("e" + i, "type"); TimelineEntity entity = createEntity("e" + i, "type");
entity.addMetric(createDummyMetric(ts + i, Long.valueOf(i * 50))); entity.addMetric(createDummyMetric(ts + i, Long.valueOf(i * 50)));
entities.addEntity(entity); entities.addEntity(entity);
@ -368,7 +369,7 @@ public TimelineCollectorContext getTimelineEntityContext() {
assertEquals(Sets.newHashSet("type"), aggregationGroups.keySet()); assertEquals(Sets.newHashSet("type"), aggregationGroups.keySet());
TimelineEntity aggregatedEntity = TimelineCollector. TimelineEntity aggregatedEntity = TimelineCollector.
aggregateWithoutGroupId(aggregationGroups, currContext.getAppId(), aggregateWithoutGroupId(aggregationGroups, currContext.getAppId(),
TimelineEntityType.YARN_APPLICATION.toString()); TimelineEntityType.YARN_APPLICATION.toString());
TimelineMetric aggregatedMetric = TimelineMetric aggregatedMetric =
aggregatedEntity.getMetrics().iterator().next(); aggregatedEntity.getMetrics().iterator().next();
assertEquals(750L, aggregatedMetric.getValues().values().iterator().next()); assertEquals(750L, aggregatedMetric.getValues().values().iterator().next());
@ -378,7 +379,7 @@ public TimelineCollectorContext getTimelineEntityContext() {
// Aggregate entities. // Aggregate entities.
aggregatedEntity = TimelineCollector. aggregatedEntity = TimelineCollector.
aggregateWithoutGroupId(aggregationGroups, currContext.getAppId(), aggregateWithoutGroupId(aggregationGroups, currContext.getAppId(),
TimelineEntityType.YARN_APPLICATION.toString()); TimelineEntityType.YARN_APPLICATION.toString());
aggregatedMetric = aggregatedEntity.getMetrics().iterator().next(); aggregatedMetric = aggregatedEntity.getMetrics().iterator().next();
// No values aggregated as no metrics put for an entity between this // No values aggregated as no metrics put for an entity between this
// aggregation and the previous one. // aggregation and the previous one.
@ -388,7 +389,7 @@ public TimelineCollectorContext getTimelineEntityContext() {
// Put 3 entities. // Put 3 entities.
entities = new TimelineEntities(); entities = new TimelineEntities();
for (int i = 1; i <=3; i++) { for (int i = 1; i <= 3; i++) {
TimelineEntity entity = createEntity("e" + i, "type"); TimelineEntity entity = createEntity("e" + i, "type");
entity.addMetric(createDummyMetric(System.currentTimeMillis() + i, 50L)); entity.addMetric(createDummyMetric(System.currentTimeMillis() + i, 50L));
entities.addEntity(entity); entities.addEntity(entity);
@ -399,7 +400,7 @@ public TimelineCollectorContext getTimelineEntityContext() {
// Aggregate entities. // Aggregate entities.
aggregatedEntity = TimelineCollector. aggregatedEntity = TimelineCollector.
aggregateWithoutGroupId(aggregationGroups, currContext.getAppId(), aggregateWithoutGroupId(aggregationGroups, currContext.getAppId(),
TimelineEntityType.YARN_APPLICATION.toString()); TimelineEntityType.YARN_APPLICATION.toString());
// Last 3 entities picked up for aggregation. // Last 3 entities picked up for aggregation.
aggregatedMetric = aggregatedEntity.getMetrics().iterator().next(); aggregatedMetric = aggregatedEntity.getMetrics().iterator().next();
assertEquals(150L, aggregatedMetric.getValues().values().iterator().next()); assertEquals(150L, aggregatedMetric.getValues().values().iterator().next());

View File

@ -18,38 +18,49 @@
package org.apache.hadoop.yarn.server.timelineservice.collector; package org.apache.hadoop.yarn.server.timelineservice.collector;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
import org.junit.Test;
import static org.junit.jupiter.api.Assertions.assertThrows;
/** /**
* Unit tests for TimelineCollectorManager. * Unit tests for TimelineCollectorManager.
*/ */
public class TestTimelineCollectorManager{ public class TestTimelineCollectorManager{
@Test(timeout = 60000, expected = YarnRuntimeException.class) @Test
public void testTimelineCollectorManagerWithInvalidTimelineWriter() { @Timeout(60000)
Configuration conf = new YarnConfiguration(); void testTimelineCollectorManagerWithInvalidTimelineWriter() {
conf.set(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS, assertThrows(YarnRuntimeException.class, () -> {
Object.class.getName()); Configuration conf = new YarnConfiguration();
runTimelineCollectorManagerWithConfig(conf); conf.set(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
Object.class.getName());
runTimelineCollectorManagerWithConfig(conf);
});
} }
@Test(timeout = 60000, expected = YarnRuntimeException.class) @Test
public void testTimelineCollectorManagerWithNonexistentTimelineWriter() { @Timeout(60000)
String nonexistentTimelineWriterClass = "org.apache.org.yarn.server." + void testTimelineCollectorManagerWithNonexistentTimelineWriter() {
"timelineservice.storage.XXXXXXXX"; assertThrows(YarnRuntimeException.class, () -> {
Configuration conf = new YarnConfiguration(); String nonexistentTimelineWriterClass = "org.apache.org.yarn.server." +
conf.set(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS, "timelineservice.storage.XXXXXXXX";
nonexistentTimelineWriterClass); Configuration conf = new YarnConfiguration();
runTimelineCollectorManagerWithConfig(conf); conf.set(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
nonexistentTimelineWriterClass);
runTimelineCollectorManagerWithConfig(conf);
});
} }
@Test(timeout = 60000) @Test
public void testTimelineCollectorManagerWithFileSystemWriter() { @Timeout(60000)
void testTimelineCollectorManagerWithFileSystemWriter() {
Configuration conf = new YarnConfiguration(); Configuration conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS, conf.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
FileSystemTimelineWriterImpl.class, TimelineWriter.class); FileSystemTimelineWriterImpl.class, TimelineWriter.class);
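
The TestTimelineCollectorManager changes above illustrate the other two mechanical conversions in this patch: @Test(timeout = ...) becomes a separate @Timeout annotation, and @Test(expected = ...) becomes an explicit assertThrows call wrapping the code that should fail. A minimal hedged sketch, using a plain IllegalStateException instead of YarnRuntimeException so it stands alone:

import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class ExpectedExceptionMigrationSketch {

  // JUnit 4 wrote this as: @Test(timeout = 60000, expected = IllegalStateException.class)
  @Test
  @Timeout(60000)  // Jupiter's @Timeout default unit is seconds, not milliseconds
  void rejectsInvalidWriterClass() {
    assertThrows(IllegalStateException.class, () -> {
      throw new IllegalStateException("invalid timeline writer class");
    });
  }
}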

View File

@ -18,11 +18,14 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.yarn.server.timelineservice.metrics.TimelineReaderMetrics; import org.apache.hadoop.yarn.server.timelineservice.metrics.TimelineReaderMetrics;
import org.junit.After;
import org.junit.Assert; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.Before; import static org.junit.jupiter.api.Assertions.assertNotNull;
import org.junit.Test;
/** /**
* Test TimelineReaderMetrics. * Test TimelineReaderMetrics.
@ -32,24 +35,24 @@ public class TestTimelineReaderMetrics {
private TimelineReaderMetrics metrics; private TimelineReaderMetrics metrics;
@Test @Test
public void testTimelineReaderMetrics() { void testTimelineReaderMetrics() {
Assert.assertNotNull(metrics); assertNotNull(metrics);
Assert.assertEquals(10, assertEquals(10,
metrics.getGetEntitiesSuccessLatency().getInterval()); metrics.getGetEntitiesSuccessLatency().getInterval());
Assert.assertEquals(10, assertEquals(10,
metrics.getGetEntitiesFailureLatency().getInterval()); metrics.getGetEntitiesFailureLatency().getInterval());
Assert.assertEquals(10, assertEquals(10,
metrics.getGetEntityTypesSuccessLatency().getInterval()); metrics.getGetEntityTypesSuccessLatency().getInterval());
Assert.assertEquals(10, assertEquals(10,
metrics.getGetEntityTypesFailureLatency().getInterval()); metrics.getGetEntityTypesFailureLatency().getInterval());
} }
@Before @BeforeEach
public void setup() { public void setup() {
metrics = TimelineReaderMetrics.getInstance(); metrics = TimelineReaderMetrics.getInstance();
} }
@After @AfterEach
public void tearDown() { public void tearDown() {
TimelineReaderMetrics.destroy(); TimelineReaderMetrics.destroy();
} }

View File

@ -18,7 +18,8 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertEquals; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.service.Service.STATE; import org.apache.hadoop.service.Service.STATE;
@ -26,12 +27,15 @@
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineReaderImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
import org.junit.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class TestTimelineReaderServer { public class TestTimelineReaderServer {
@Test(timeout = 60000) @Test
public void testStartStopServer() throws Exception { @Timeout(60000)
void testStartStopServer() throws Exception {
@SuppressWarnings("resource") @SuppressWarnings("resource")
TimelineReaderServer server = new TimelineReaderServer(); TimelineReaderServer server = new TimelineReaderServer();
Configuration config = new YarnConfiguration(); Configuration config = new YarnConfiguration();
@ -56,30 +60,36 @@ public void testStartStopServer() throws Exception {
} }
} }
@Test(timeout = 60000, expected = YarnRuntimeException.class) @Test
public void testTimelineReaderServerWithInvalidTimelineReader() { @Timeout(60000)
Configuration conf = new YarnConfiguration(); void testTimelineReaderServerWithInvalidTimelineReader() {
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); assertThrows(YarnRuntimeException.class, () -> {
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f); Configuration conf = new YarnConfiguration();
conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_WEBAPP_ADDRESS, conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
"localhost:0"); conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_CLASS, conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_WEBAPP_ADDRESS,
Object.class.getName()); "localhost:0");
runTimelineReaderServerWithConfig(conf); conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_CLASS,
Object.class.getName());
runTimelineReaderServerWithConfig(conf);
});
} }
@Test(timeout = 60000, expected = YarnRuntimeException.class) @Test
public void testTimelineReaderServerWithNonexistentTimelineReader() { @Timeout(60000)
String nonexistentTimelineReaderClass = "org.apache.org.yarn.server." + void testTimelineReaderServerWithNonexistentTimelineReader() {
"timelineservice.storage.XXXXXXXX"; assertThrows(YarnRuntimeException.class, () -> {
Configuration conf = new YarnConfiguration(); String nonexistentTimelineReaderClass = "org.apache.org.yarn.server." +
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); "timelineservice.storage.XXXXXXXX";
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f); Configuration conf = new YarnConfiguration();
conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_WEBAPP_ADDRESS, conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
"localhost:0"); conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_CLASS, conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_WEBAPP_ADDRESS,
nonexistentTimelineReaderClass); "localhost:0");
runTimelineReaderServerWithConfig(conf); conf.set(YarnConfiguration.TIMELINE_SERVICE_READER_CLASS,
nonexistentTimelineReaderClass);
runTimelineReaderServerWithConfig(conf);
});
} }
/** /**

View File

@ -18,38 +18,38 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.util.List; import java.util.List;
import org.junit.Test; import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
public class TestTimelineReaderUtils { public class TestTimelineReaderUtils {
@Test @Test
public void testSplitUsingEscapeAndDelimChar() throws Exception { void testSplitUsingEscapeAndDelimChar() throws Exception {
List<String> list = List<String> list =
TimelineReaderUtils.split("*!cluster!*!b**o***!xer!oozie**", '!', '*'); TimelineReaderUtils.split("*!cluster!*!b**o***!xer!oozie**", '!', '*');
String[] arr = new String[list.size()]; String[] arr = new String[list.size()];
arr = list.toArray(arr); arr = list.toArray(arr);
assertArrayEquals(new String[] {"!cluster", "!b*o*!xer", "oozie*"}, arr); assertArrayEquals(new String[]{"!cluster", "!b*o*!xer", "oozie*"}, arr);
list = TimelineReaderUtils.split("*!cluster!*!b**o***!xer!!", '!', '*'); list = TimelineReaderUtils.split("*!cluster!*!b**o***!xer!!", '!', '*');
arr = new String[list.size()]; arr = new String[list.size()];
arr = list.toArray(arr); arr = list.toArray(arr);
assertArrayEquals(new String[] {"!cluster", "!b*o*!xer", "", ""}, arr); assertArrayEquals(new String[]{"!cluster", "!b*o*!xer", "", ""}, arr);
} }
@Test @Test
public void testJoinAndEscapeStrings() throws Exception { void testJoinAndEscapeStrings() throws Exception {
assertEquals("*!cluster!*!b**o***!xer!oozie**", assertEquals("*!cluster!*!b**o***!xer!oozie**",
TimelineReaderUtils.joinAndEscapeStrings( TimelineReaderUtils.joinAndEscapeStrings(
new String[] {"!cluster", "!b*o*!xer", "oozie*"}, '!', '*')); new String[]{"!cluster", "!b*o*!xer", "oozie*"}, '!', '*'));
assertEquals("*!cluster!*!b**o***!xer!!", assertEquals("*!cluster!*!b**o***!xer!!",
TimelineReaderUtils.joinAndEscapeStrings( TimelineReaderUtils.joinAndEscapeStrings(
new String[] {"!cluster", "!b*o*!xer", "", ""}, '!', '*')); new String[]{"!cluster", "!b*o*!xer", "", ""}, '!', '*'));
assertNull(TimelineReaderUtils.joinAndEscapeStrings( assertNull(TimelineReaderUtils.joinAndEscapeStrings(
new String[] {"!cluster", "!b*o*!xer", null, ""}, '!', '*')); new String[]{"!cluster", "!b*o*!xer", null, ""}, '!', '*'));
} }
} }

View File

@ -18,11 +18,6 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException; import java.lang.reflect.UndeclaredThrowableException;
@ -30,9 +25,22 @@
import java.net.URI; import java.net.URI;
import java.net.URL; import java.net.URL;
import java.util.Set; import java.util.Set;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.http.JettyUtils;
@ -45,21 +53,12 @@
import org.apache.hadoop.yarn.server.timelineservice.storage.TestFileSystemTimelineReaderImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.TestFileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.sun.jersey.api.client.Client; import static org.junit.jupiter.api.Assertions.assertEquals;
import com.sun.jersey.api.client.ClientResponse; import static org.junit.jupiter.api.Assertions.assertFalse;
import com.sun.jersey.api.client.ClientResponse.Status; import static org.junit.jupiter.api.Assertions.assertNotNull;
import com.sun.jersey.api.client.GenericType; import static org.junit.jupiter.api.Assertions.assertTrue;
import com.sun.jersey.api.client.config.ClientConfig; import static org.junit.jupiter.api.Assertions.fail;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
public class TestTimelineReaderWebServices { public class TestTimelineReaderWebServices {
@ -69,17 +68,17 @@ public class TestTimelineReaderWebServices {
private int serverPort; private int serverPort;
private TimelineReaderServer server; private TimelineReaderServer server;
@BeforeClass @BeforeAll
public static void setup() throws Exception { public static void setup() throws Exception {
TestFileSystemTimelineReaderImpl.initializeDataDirectory(ROOT_DIR); TestFileSystemTimelineReaderImpl.initializeDataDirectory(ROOT_DIR);
} }
@AfterClass @AfterAll
public static void tearDown() throws Exception { public static void tearDown() throws Exception {
FileUtils.deleteDirectory(new File(ROOT_DIR)); FileUtils.deleteDirectory(new File(ROOT_DIR));
} }
@Before @BeforeEach
public void init() throws Exception { public void init() throws Exception {
try { try {
Configuration config = new YarnConfiguration(); Configuration config = new YarnConfiguration();
@ -97,11 +96,11 @@ public void init() throws Exception {
server.start(); server.start();
serverPort = server.getWebServerPort(); serverPort = server.getWebServerPort();
} catch (Exception e) { } catch (Exception e) {
Assert.fail("Web server failed to start"); fail("Web server failed to start");
} }
} }
@After @AfterEach
public void stop() throws Exception { public void stop() throws Exception {
if (server != null) { if (server != null) {
server.stop(); server.stop();
@ -165,21 +164,21 @@ public HttpURLConnection getHttpURLConnection(final URL url)
} }
@Test @Test
public void testAbout() throws Exception { void testAbout() throws Exception {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"); URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/");
Client client = createClient(); Client client = createClient();
try { try {
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
TimelineAbout about = resp.getEntity(TimelineAbout.class); TimelineAbout about = resp.getEntity(TimelineAbout.class);
Assert.assertNotNull(about); assertNotNull(about);
Assert.assertEquals("Timeline Reader API", about.getAbout()); assertEquals("Timeline Reader API", about.getAbout());
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntityDefaultView() throws Exception { void testGetEntityDefaultView() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -191,7 +190,7 @@ public void testGetEntityDefaultView() throws Exception {
assertNotNull(entity); assertNotNull(entity);
assertEquals("id_1", entity.getId()); assertEquals("id_1", entity.getId());
assertEquals("app", entity.getType()); assertEquals("app", entity.getType());
assertEquals((Long)1425016502000L, entity.getCreatedTime()); assertEquals((Long) 1425016502000L, entity.getCreatedTime());
// Default view i.e. when no fields are specified, entity contains only // Default view i.e. when no fields are specified, entity contains only
// entity id, entity type and created time. // entity id, entity type and created time.
assertEquals(0, entity.getConfigs().size()); assertEquals(0, entity.getConfigs().size());
@ -202,7 +201,7 @@ public void testGetEntityDefaultView() throws Exception {
} }
@Test @Test
public void testGetEntityWithUserAndFlowInfo() throws Exception { void testGetEntityWithUserAndFlowInfo() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -215,14 +214,14 @@ public void testGetEntityWithUserAndFlowInfo() throws Exception {
assertNotNull(entity); assertNotNull(entity);
assertEquals("id_1", entity.getId()); assertEquals("id_1", entity.getId());
assertEquals("app", entity.getType()); assertEquals("app", entity.getType());
assertEquals((Long)1425016502000L, entity.getCreatedTime()); assertEquals((Long) 1425016502000L, entity.getCreatedTime());
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntityCustomFields() throws Exception { void testGetEntityCustomFields() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
// Fields are case insensitive. // Fields are case insensitive.
@ -238,8 +237,8 @@ public void testGetEntityCustomFields() throws Exception {
assertEquals("app", entity.getType()); assertEquals("app", entity.getType());
assertEquals(3, entity.getConfigs().size()); assertEquals(3, entity.getConfigs().size());
assertEquals(3, entity.getMetrics().size()); assertEquals(3, entity.getMetrics().size());
assertTrue("UID should be present", assertTrue(entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY),
entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY)); "UID should be present");
// Includes UID. // Includes UID.
assertEquals(3, entity.getInfo().size()); assertEquals(3, entity.getInfo().size());
// No events will be returned as events are not part of fields. // No events will be returned as events are not part of fields.
@ -250,7 +249,7 @@ public void testGetEntityCustomFields() throws Exception {
} }
@Test @Test
public void testGetEntityAllFields() throws Exception { void testGetEntityAllFields() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -265,8 +264,8 @@ public void testGetEntityAllFields() throws Exception {
assertEquals("app", entity.getType()); assertEquals("app", entity.getType());
assertEquals(3, entity.getConfigs().size()); assertEquals(3, entity.getConfigs().size());
assertEquals(3, entity.getMetrics().size()); assertEquals(3, entity.getMetrics().size());
assertTrue("UID should be present", assertTrue(entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY),
entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY)); "UID should be present");
// Includes UID. // Includes UID.
assertEquals(3, entity.getInfo().size()); assertEquals(3, entity.getInfo().size());
assertEquals(2, entity.getEvents().size()); assertEquals(2, entity.getEvents().size());
@ -276,7 +275,7 @@ public void testGetEntityAllFields() throws Exception {
} }
@Test @Test
public void testGetEntityNotPresent() throws Exception { void testGetEntityNotPresent() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -288,7 +287,7 @@ public void testGetEntityNotPresent() throws Exception {
} }
@Test @Test
public void testQueryWithoutCluster() throws Exception { void testQueryWithoutCluster() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -305,7 +304,8 @@ public void testQueryWithoutCluster() throws Exception {
"timeline/apps/app1/entities/app"); "timeline/apps/app1/entities/app");
resp = getResponse(client, uri); resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
@ -316,52 +316,55 @@ public void testQueryWithoutCluster() throws Exception {
} }
@Test @Test
public void testGetEntities() throws Exception { void testGetEntities() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app"); "timeline/clusters/cluster1/apps/app1/entities/app");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(4, entities.size()); assertEquals(4, entities.size());
assertTrue("Entities id_1, id_2, id_3 and id_4 should have been" + assertTrue(entities.contains(newEntity("app", "id_1")) &&
" present in response",
entities.contains(newEntity("app", "id_1")) &&
entities.contains(newEntity("app", "id_2")) && entities.contains(newEntity("app", "id_2")) &&
entities.contains(newEntity("app", "id_3")) && entities.contains(newEntity("app", "id_3")) &&
entities.contains(newEntity("app", "id_4"))); entities.contains(newEntity("app", "id_4")),
"Entities id_1, id_2, id_3 and id_4 should have been" +
" present in response");
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntitiesWithLimit() throws Exception { void testGetEntitiesWithLimit() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?limit=2"); "timeline/clusters/cluster1/apps/app1/entities/app?limit=2");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(2, entities.size()); assertEquals(2, entities.size());
// Entities returned are based on most recent created time. // Entities returned are based on most recent created time.
assertTrue("Entities with id_1 and id_4 should have been present " + assertTrue(entities.contains(newEntity("app", "id_1")) &&
"in response based on entity created time.", entities.contains(newEntity("app", "id_4")),
entities.contains(newEntity("app", "id_1")) && "Entities with id_1 and id_4 should have been present " +
entities.contains(newEntity("app", "id_4"))); "in response based on entity created time.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?limit=3"); "clusters/cluster1/apps/app1/entities/app?limit=3");
resp = getResponse(client, uri); resp = getResponse(client, uri);
entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
@ -374,7 +377,7 @@ public void testGetEntitiesWithLimit() throws Exception {
} }
@Test @Test
public void testGetEntitiesBasedOnCreatedTime() throws Exception { void testGetEntitiesBasedOnCreatedTime() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -382,44 +385,47 @@ public void testGetEntitiesBasedOnCreatedTime() throws Exception {
"createdtimestart=1425016502030&createdtimeend=1425016502060"); "createdtimestart=1425016502030&createdtimeend=1425016502060");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_4 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_4")),
entities.contains(newEntity("app", "id_4"))); "Entity with id_4 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?createdtimeend" + "clusters/cluster1/apps/app1/entities/app?createdtimeend" +
"=1425016502010"); "=1425016502010");
resp = getResponse(client, uri); resp = getResponse(client, uri);
entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(3, entities.size()); assertEquals(3, entities.size());
assertFalse("Entity with id_4 should not have been present in response.", assertFalse(entities.contains(newEntity("app", "id_4")),
entities.contains(newEntity("app", "id_4"))); "Entity with id_4 should not have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?createdtimestart=" + "clusters/cluster1/apps/app1/entities/app?createdtimestart=" +
"1425016502010"); "1425016502010");
resp = getResponse(client, uri); resp = getResponse(client, uri);
entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_4 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_4")),
entities.contains(newEntity("app", "id_4"))); "Entity with id_4 should have been present in response.");
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntitiesByRelations() throws Exception { void testGetEntitiesByRelations() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -427,44 +433,47 @@ public void testGetEntitiesByRelations() throws Exception {
"flow:flow1"); "flow:flow1");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_1 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_1")),
entities.contains(newEntity("app", "id_1"))); "Entity with id_1 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?isrelatedto=" + "clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
"type1:tid1_2,type2:tid2_1%60"); "type1:tid1_2,type2:tid2_1%60");
resp = getResponse(client, uri); resp = getResponse(client, uri);
entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_1 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_1")),
entities.contains(newEntity("app", "id_1"))); "Entity with id_1 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?isrelatedto=" + "clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
"type1:tid1_1:tid1_2,type2:tid2_1%60"); "type1:tid1_1:tid1_2,type2:tid2_1%60");
resp = getResponse(client, uri); resp = getResponse(client, uri);
entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_1 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_1")),
entities.contains(newEntity("app", "id_1"))); "Entity with id_1 should have been present in response.");
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntitiesByConfigFilters() throws Exception { void testGetEntitiesByConfigFilters() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -472,20 +481,21 @@ public void testGetEntitiesByConfigFilters() throws Exception {
"conffilters=config_1%20eq%20123%20AND%20config_3%20eq%20abc"); "conffilters=config_1%20eq%20123%20AND%20config_3%20eq%20abc");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_3 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_3")),
entities.contains(newEntity("app", "id_3"))); "Entity with id_3 should have been present in response.");
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntitiesByInfoFilters() throws Exception { void testGetEntitiesByInfoFilters() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -493,20 +503,21 @@ public void testGetEntitiesByInfoFilters() throws Exception {
"infofilters=info2%20eq%203.5"); "infofilters=info2%20eq%203.5");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_3 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_3")),
entities.contains(newEntity("app", "id_3"))); "Entity with id_3 should have been present in response.");
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntitiesByMetricFilters() throws Exception { void testGetEntitiesByMetricFilters() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -514,22 +525,23 @@ public void testGetEntitiesByMetricFilters() throws Exception {
"metricfilters=metric3%20ge%200"); "metricfilters=metric3%20ge%200");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(2, entities.size()); assertEquals(2, entities.size());
assertTrue("Entities with id_1 and id_2 should have been present" + assertTrue(entities.contains(newEntity("app", "id_1")) &&
" in response.", entities.contains(newEntity("app", "id_2")),
entities.contains(newEntity("app", "id_1")) && "Entities with id_1 and id_2 should have been present" +
entities.contains(newEntity("app", "id_2"))); " in response.");
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntitiesByEventFilters() throws Exception { void testGetEntitiesByEventFilters() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -537,31 +549,33 @@ public void testGetEntitiesByEventFilters() throws Exception {
"eventfilters=event_2,event_4"); "eventfilters=event_2,event_4");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
assertEquals(1, entities.size()); assertEquals(1, entities.size());
assertTrue("Entity with id_3 should have been present in response.", assertTrue(entities.contains(newEntity("app", "id_3")),
entities.contains(newEntity("app", "id_3"))); "Entity with id_3 should have been present in response.");
} finally { } finally {
client.destroy(); client.destroy();
} }
} }
@Test @Test
public void testGetEntitiesNoMatch() throws Exception { void testGetEntitiesNoMatch() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" + "timeline/clusters/cluster1/apps/app1/entities/app?" +
"metricfilters=metric7%20ge%200&isrelatedto=type1:tid1_1:tid1_2,"+ "metricfilters=metric7%20ge%200&isrelatedto=type1:tid1_1:tid1_2," +
"type2:tid2_1%60&relatesto=flow:flow1&eventfilters=event_2,event_4" + "type2:tid2_1%60&relatesto=flow:flow1&eventfilters=event_2,event_4" +
"&infofilters=info2%20eq%203.5&createdtimestart=1425016502030&" + "&infofilters=info2%20eq%203.5&createdtimestart=1425016502030&" +
"createdtimeend=1425016502060"); "createdtimeend=1425016502060");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities = Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString()); resp.getType().toString());
assertNotNull(entities); assertNotNull(entities);
@ -572,7 +586,7 @@ public void testGetEntitiesNoMatch() throws Exception {
} }
@Test @Test
public void testInvalidValuesHandling() throws Exception { void testInvalidValuesHandling() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
@ -592,7 +606,7 @@ public void testInvalidValuesHandling() throws Exception {
} }
@Test @Test
public void testGetAppAttempts() throws Exception { void testGetAppAttempts() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
@ -608,15 +622,15 @@ public void testGetAppAttempts() throws Exception {
int totalEntities = entities.size(); int totalEntities = entities.size();
assertEquals(2, totalEntities); assertEquals(2, totalEntities);
assertTrue( assertTrue(
"Entity with app-attempt-1 should have been present in response.",
entities.contains( entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
"app-attempt-1"))); "app-attempt-1")),
"Entity with app-attempt-1 should have been present in response.");
assertTrue( assertTrue(
"Entity with app-attempt-2 should have been present in response.",
entities.contains( entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
"app-attempt-2"))); "app-attempt-2")),
"Entity with app-attempt-2 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/appattempts"); + "timeline/clusters/cluster1/apps/app1/appattempts");
@ -628,15 +642,15 @@ public void testGetAppAttempts() throws Exception {
int retrievedEntity = entities.size(); int retrievedEntity = entities.size();
assertEquals(2, retrievedEntity); assertEquals(2, retrievedEntity);
assertTrue( assertTrue(
"Entity with app-attempt-1 should have been present in response.",
entities.contains( entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
"app-attempt-1"))); "app-attempt-1")),
"Entity with app-attempt-1 should have been present in response.");
assertTrue( assertTrue(
"Entity with app-attempt-2 should have been present in response.",
entities.contains( entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
"app-attempt-2"))); "app-attempt-2")),
"Entity with app-attempt-2 should have been present in response.");
assertEquals(totalEntities, retrievedEntity); assertEquals(totalEntities, retrievedEntity);
@ -646,7 +660,7 @@ public void testGetAppAttempts() throws Exception {
} }
@Test @Test
public void testGetAppAttempt() throws Exception { void testGetAppAttempt() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
@ -677,7 +691,7 @@ public void testGetAppAttempt() throws Exception {
} }
@Test @Test
public void testGetContainers() throws Exception { void testGetContainers() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
// total 3 containers in an application. // total 3 containers in an application.
@ -693,17 +707,17 @@ public void testGetContainers() throws Exception {
int totalEntities = entities.size(); int totalEntities = entities.size();
assertEquals(3, totalEntities); assertEquals(3, totalEntities);
assertTrue( assertTrue(
"Entity with container_1_1 should have been present in response.",
entities.contains(newEntity( entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1"))); TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")),
"Entity with container_1_1 should have been present in response.");
assertTrue( assertTrue(
"Entity with container_2_1 should have been present in response.",
entities.contains(newEntity( entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1"))); TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")),
"Entity with container_2_1 should have been present in response.");
assertTrue( assertTrue(
"Entity with container_2_2 should have been present in response.",
entities.contains(newEntity( entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2"))); TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")),
"Entity with container_2_2 should have been present in response.");
// for app-attempt1 1 container has run // for app-attempt1 1 container has run
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
@ -717,9 +731,9 @@ public void testGetContainers() throws Exception {
int retrievedEntity = entities.size(); int retrievedEntity = entities.size();
assertEquals(1, retrievedEntity); assertEquals(1, retrievedEntity);
assertTrue( assertTrue(
"Entity with container_1_1 should have been present in response.",
entities.contains(newEntity( entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1"))); TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")),
"Entity with container_1_1 should have been present in response.");
// for app-attempt2 2 containers have run // for app-attempt2 2 containers have run
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
@ -733,13 +747,13 @@ public void testGetContainers() throws Exception {
retrievedEntity += entities.size(); retrievedEntity += entities.size();
assertEquals(2, entities.size()); assertEquals(2, entities.size());
assertTrue( assertTrue(
"Entity with container_2_1 should have been present in response.",
entities.contains(newEntity( entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1"))); TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")),
"Entity with container_2_1 should have been present in response.");
assertTrue( assertTrue(
"Entity with container_2_2 should have been present in response.",
entities.contains(newEntity( entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2"))); TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")),
"Entity with container_2_2 should have been present in response.");
assertEquals(totalEntities, retrievedEntity); assertEquals(totalEntities, retrievedEntity);
@ -749,7 +763,7 @@ public void testGetContainers() throws Exception {
} }
@Test @Test
public void testGetContainer() throws Exception { void testGetContainer() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
@ -780,11 +794,11 @@ public void testGetContainer() throws Exception {
} }
@Test @Test
public void testHealthCheck() throws Exception { void testHealthCheck() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/health"); + "timeline/health");
ClientResponse resp = getResponse(client, uri); ClientResponse resp = getResponse(client, uri);
TimelineHealth timelineHealth = TimelineHealth timelineHealth =
resp.getEntity(new GenericType<TimelineHealth>() { resp.getEntity(new GenericType<TimelineHealth>() {
View File
@ -18,33 +18,14 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException; import java.lang.reflect.UndeclaredThrowableException;
import java.net.HttpURLConnection; import java.net.HttpURLConnection;
import java.net.URI; import java.net.URI;
import java.net.URL; import java.net.URL;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TestFileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status; import com.sun.jersey.api.client.ClientResponse.Status;
@ -52,6 +33,24 @@
import com.sun.jersey.api.client.config.DefaultClientConfig; import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory; import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler; import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TestFileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/** /**
* Tests ACL check while retrieving entity-types per application. * Tests ACL check while retrieving entity-types per application.
@ -66,17 +65,17 @@ public class TestTimelineReaderWebServicesACL {
private TimelineReaderServer server; private TimelineReaderServer server;
private static final String ADMIN = "yarn"; private static final String ADMIN = "yarn";
@BeforeClass @BeforeAll
public static void setup() throws Exception { public static void setup() throws Exception {
TestFileSystemTimelineReaderImpl.initializeDataDirectory(ROOT_DIR); TestFileSystemTimelineReaderImpl.initializeDataDirectory(ROOT_DIR);
} }
@AfterClass @AfterAll
public static void tearDown() throws Exception { public static void tearDown() throws Exception {
FileUtils.deleteDirectory(new File(ROOT_DIR)); FileUtils.deleteDirectory(new File(ROOT_DIR));
} }
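Besides the assertion changes, this file swaps the JUnit 4 lifecycle annotations for their Jupiter equivalents: @BeforeClass becomes @BeforeAll, @AfterClass becomes @AfterAll, @Before becomes @BeforeEach, and @After becomes @AfterEach. A minimal skeleton of that mapping; LifecycleSketch and its empty methods are hypothetical and only illustrate where each annotation applies:

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

public class LifecycleSketch {
  @BeforeAll
  public static void setup() { /* once per class, replaces @BeforeClass */ }

  @AfterAll
  public static void tearDown() { /* once per class, replaces @AfterClass */ }

  @BeforeEach
  public void init() { /* before every test, replaces @Before */ }

  @AfterEach
  public void stop() { /* after every test, replaces @After */ }

  @Test
  void smoke() { /* JUnit 5 allows package-private test methods, as used in this patch */ }
}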
@Before @BeforeEach
public void init() throws Exception { public void init() throws Exception {
try { try {
Configuration config = new YarnConfiguration(); Configuration config = new YarnConfiguration();
@ -97,11 +96,11 @@ public void init() throws Exception {
server.start(); server.start();
serverPort = server.getWebServerPort(); serverPort = server.getWebServerPort();
} catch (Exception e) { } catch (Exception e) {
Assert.fail("Web server failed to start"); fail("Web server failed to start");
} }
} }
@After @AfterEach
public void stop() throws Exception { public void stop() throws Exception {
if (server != null) { if (server != null) {
server.stop(); server.stop();
@ -141,35 +140,35 @@ public HttpURLConnection getHttpURLConnection(final URL url)
} }
@Test @Test
public void testGetEntityTypes() throws Exception { void testGetEntityTypes() throws Exception {
Client client = createClient(); Client client = createClient();
try { try {
String unAuthorizedUser ="user2"; String unAuthorizedUser = "user2";
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/apps/app1/entity-types?user.name="+unAuthorizedUser); "timeline/apps/app1/entity-types?user.name=" + unAuthorizedUser);
String msg = "User " + unAuthorizedUser String msg = "User " + unAuthorizedUser
+ " is not allowed to read TimelineService V2 data."; + " is not allowed to read TimelineService V2 data.";
ClientResponse resp = verifyHttpResponse(client, uri, Status.FORBIDDEN); ClientResponse resp = verifyHttpResponse(client, uri, Status.FORBIDDEN);
assertTrue(resp.getEntity(String.class).contains(msg)); assertTrue(resp.getEntity(String.class).contains(msg));
String authorizedUser ="user1"; String authorizedUser = "user1";
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/apps/app1/entity-types?user.name="+authorizedUser); "timeline/apps/app1/entity-types?user.name=" + authorizedUser);
verifyHttpResponse(client, uri, Status.OK); verifyHttpResponse(client, uri, Status.OK);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/apps/app1/entity-types?user.name="+ADMIN); "timeline/apps/app1/entity-types?user.name=" + ADMIN);
verifyHttpResponse(client, uri, Status.OK); verifyHttpResponse(client, uri, Status.OK);
// Verify with Query Parameter userid // Verify with Query Parameter userid
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/apps/app1/entity-types?user.name="+authorizedUser "timeline/apps/app1/entity-types?user.name=" + authorizedUser
+ "&userid="+authorizedUser); + "&userid=" + authorizedUser);
verifyHttpResponse(client, uri, Status.OK); verifyHttpResponse(client, uri, Status.OK);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/apps/app1/entity-types?user.name="+authorizedUser "timeline/apps/app1/entity-types?user.name=" + authorizedUser
+ "&userid="+unAuthorizedUser); + "&userid=" + unAuthorizedUser);
verifyHttpResponse(client, uri, Status.FORBIDDEN); verifyHttpResponse(client, uri, Status.FORBIDDEN);
} finally { } finally {
client.destroy(); client.destroy();
View File
@ -18,18 +18,23 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import java.util.LinkedHashSet;
import java.util.Set;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.ForbiddenException; import org.apache.hadoop.yarn.webapp.ForbiddenException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.LinkedHashSet; import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Set; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
public class TestTimelineReaderWebServicesBasicAcl { public class TestTimelineReaderWebServicesBasicAcl {
@ -39,11 +44,11 @@ public class TestTimelineReaderWebServicesBasicAcl {
UserGroupInformation.createRemoteUser(adminUser); UserGroupInformation.createRemoteUser(adminUser);
private Configuration config; private Configuration config;
@Before public void setUp() throws Exception { @BeforeEach public void setUp() throws Exception {
config = new YarnConfiguration(); config = new YarnConfiguration();
} }
@After public void tearDown() throws Exception { @AfterEach public void tearDown() throws Exception {
if (manager != null) { if (manager != null) {
manager.stop(); manager.stop();
manager = null; manager = null;
@ -51,7 +56,8 @@ public class TestTimelineReaderWebServicesBasicAcl {
config = null; config = null;
} }
@Test public void testTimelineReaderManagerAclsWhenDisabled() @Test
void testTimelineReaderManagerAclsWhenDisabled()
throws Exception { throws Exception {
config.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); config.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false);
config.set(YarnConfiguration.YARN_ADMIN_ACL, adminUser); config.set(YarnConfiguration.YARN_ADMIN_ACL, adminUser);
@ -60,14 +66,15 @@ public class TestTimelineReaderWebServicesBasicAcl {
manager.start(); manager.start();
// when acls are disabled, always return true // when acls are disabled, always return true
Assert.assertTrue(manager.checkAccess(null)); assertTrue(manager.checkAccess(null));
// filter is disabled, so should return false // filter is disabled, so should return false
Assert.assertFalse( assertFalse(
TimelineReaderWebServices.isDisplayEntityPerUserFilterEnabled(config)); TimelineReaderWebServices.isDisplayEntityPerUserFilterEnabled(config));
} }
@Test public void testTimelineReaderManagerAclsWhenEnabled() @Test
void testTimelineReaderManagerAclsWhenEnabled()
throws Exception { throws Exception {
Configuration config = new YarnConfiguration(); Configuration config = new YarnConfiguration();
config.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); config.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
@ -85,30 +92,30 @@ public class TestTimelineReaderWebServicesBasicAcl {
UserGroupInformation.createRemoteUser(user2); UserGroupInformation.createRemoteUser(user2);
// false because ugi is null // false because ugi is null
Assert.assertFalse(TimelineReaderWebServices assertFalse(TimelineReaderWebServices
.validateAuthUserWithEntityUser(manager, null, user1)); .validateAuthUserWithEntityUser(manager, null, user1));
// false because ugi is null in non-secure cluster. User must pass // false because ugi is null in non-secure cluster. User must pass
// ?user.name as query params in REST end points. // ?user.name as query params in REST end points.
try { try {
TimelineReaderWebServices.checkAccess(manager, null, user1); TimelineReaderWebServices.checkAccess(manager, null, user1);
Assert.fail("user1Ugi is not allowed to view user1"); fail("user1Ugi is not allowed to view user1");
} catch (ForbiddenException e) { } catch (ForbiddenException e) {
// expected // expected
} }
// incoming ugi is admin asking for entity owner user1 // incoming ugi is admin asking for entity owner user1
Assert.assertTrue( assertTrue(
TimelineReaderWebServices.checkAccess(manager, adminUgi, user1)); TimelineReaderWebServices.checkAccess(manager, adminUgi, user1));
// incoming ugi is admin asking for entity owner user2 // incoming ugi is admin asking for entity owner user2
Assert.assertTrue( assertTrue(
TimelineReaderWebServices.checkAccess(manager, adminUgi, user2)); TimelineReaderWebServices.checkAccess(manager, adminUgi, user2));
// incoming ugi is non-admin i.e user1Ugi asking for entity owner user2 // incoming ugi is non-admin i.e user1Ugi asking for entity owner user2
try { try {
TimelineReaderWebServices.checkAccess(manager, user1Ugi, user2); TimelineReaderWebServices.checkAccess(manager, user1Ugi, user2);
Assert.fail("user1Ugi is not allowed to view user2"); fail("user1Ugi is not allowed to view user2");
} catch (ForbiddenException e) { } catch (ForbiddenException e) {
// expected // expected
} }
@ -116,7 +123,7 @@ public class TestTimelineReaderWebServicesBasicAcl {
// incoming ugi is non-admin i.e user2Ugi asking for entity owner user1 // incoming ugi is non-admin i.e user2Ugi asking for entity owner user1
try { try {
TimelineReaderWebServices.checkAccess(manager, user1Ugi, user2); TimelineReaderWebServices.checkAccess(manager, user1Ugi, user2);
Assert.fail("user2Ugi is not allowed to view user1"); fail("user2Ugi is not allowed to view user1");
} catch (ForbiddenException e) { } catch (ForbiddenException e) {
// expected // expected
} }
@ -127,25 +134,23 @@ public class TestTimelineReaderWebServicesBasicAcl {
TimelineReaderWebServices TimelineReaderWebServices
.checkAccess(manager, adminUgi, entities, userKey, true); .checkAccess(manager, adminUgi, entities, userKey, true);
// admin is allowed to view other entities // admin is allowed to view other entities
Assert.assertTrue(entities.size() == 10); assertEquals(10, entities.size());
// incoming ugi is user1Ugi asking for entities // incoming ugi is user1Ugi asking for entities
// only user1 entities are allowed to view // only user1 entities are allowed to view
entities = createEntities(5, userKey); entities = createEntities(5, userKey);
TimelineReaderWebServices TimelineReaderWebServices
.checkAccess(manager, user1Ugi, entities, userKey, true); .checkAccess(manager, user1Ugi, entities, userKey, true);
Assert.assertTrue(entities.size() == 1); assertEquals(1, entities.size());
Assert assertEquals(user1, entities.iterator().next().getInfo().get(userKey));
.assertEquals(user1, entities.iterator().next().getInfo().get(userKey));
// incoming ugi is user2Ugi asking for entities // incoming ugi is user2Ugi asking for entities
// only user2 entities are allowed to view // only user2 entities are allowed to view
entities = createEntities(8, userKey); entities = createEntities(8, userKey);
TimelineReaderWebServices TimelineReaderWebServices
.checkAccess(manager, user2Ugi, entities, userKey, true); .checkAccess(manager, user2Ugi, entities, userKey, true);
Assert.assertTrue(entities.size() == 1); assertEquals(1, entities.size());
Assert assertEquals(user2, entities.iterator().next().getInfo().get(userKey));
.assertEquals(user2, entities.iterator().next().getInfo().get(userKey));
} }
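The size checks above are also rewritten from Assert.assertTrue(entities.size() == n) to assertEquals(n, entities.size()). A small sketch of why that form is preferable: on failure assertEquals reports both the expected and the actual value, while the boolean form only reports that the condition was false. The list below is illustrative, not taken from the test:

import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;

public class SizeCheckSketch {
  public static void main(String[] args) {
    List<String> entities = List.of("e1", "e2");
    // If the size did not match, the failure would read "expected: <2> but was: <...>".
    assertEquals(2, entities.size());
  }
}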
Set<TimelineEntity> createEntities(int noOfUsers, String userKey) { Set<TimelineEntity> createEntities(int noOfUsers, String userKey) {
View File
@ -18,10 +18,7 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertNotNull; import org.junit.jupiter.api.Test;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.apache.hadoop.util.Sets; import org.apache.hadoop.util.Sets;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
@ -32,20 +29,19 @@
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
import org.junit.Assert;
import org.junit.Test; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
public class TestTimelineReaderWebServicesUtils { public class TestTimelineReaderWebServicesUtils {
private static void verifyFilterList(String expr, TimelineFilterList list, private static void verifyFilterList(String expr, TimelineFilterList list,
TimelineFilterList expectedList) throws Exception { TimelineFilterList expectedList) {
assertNotNull(list); assertEquals(expectedList, list);
assertTrue("Unexpected List received after parsing expression " + expr +
". Expected=" + expectedList + " but Actual=" + list,
list.equals(expectedList));
} }
@Test @Test
public void testMetricFiltersParsing() throws Exception { void testMetricFiltersParsing() throws Exception {
String expr = "(((key11 ne 234 AND key12 gt 23) AND " + String expr = "(((key11 ne 234 AND key12 gt 23) AND " +
"(key13 lt 34 OR key14 ge 567)) OR (key21 lt 24 OR key22 le 45))"; "(key13 lt 34 OR key14 ge 567)) OR (key21 lt 24 OR key22 le 45))";
TimelineFilterList expectedList = new TimelineFilterList( TimelineFilterList expectedList = new TimelineFilterList(
@ -168,7 +164,7 @@ public void testMetricFiltersParsing() throws Exception {
TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList);
// Test with unnecessary spaces. // Test with unnecessary spaces.
expr = " abc ne 234 AND def gt 23 OR rst lt "+ expr = " abc ne 234 AND def gt 23 OR rst lt " +
" 24 OR xyz le 456 AND pqr ge 2 "; " 24 OR xyz le 456 AND pqr ge 2 ";
expectedList = new TimelineFilterList( expectedList = new TimelineFilterList(
new TimelineFilterList( new TimelineFilterList(
@ -283,7 +279,8 @@ public void testMetricFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Improper brackets. Exception should have been thrown."); fail("Improper brackets. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(((key11 ne 234 AND key12 gt v3 OR key13 lt 24 OR key14 le 456 " + expr = "(((key11 ne 234 AND key12 gt v3 OR key13 lt 24 OR key14 le 456 " +
"AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " +
@ -291,7 +288,8 @@ public void testMetricFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Non Numeric value. Exception should have been thrown."); fail("Non Numeric value. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(((key11 ne (234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " + expr = "(((key11 ne (234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " +
"AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " +
@ -299,7 +297,8 @@ public void testMetricFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Unexpected opening bracket. Exception should have been thrown."); fail("Unexpected opening bracket. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(((k)ey11 ne 234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " + expr = "(((k)ey11 ne 234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " +
"AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " +
@ -307,7 +306,8 @@ public void testMetricFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Unexpected closing bracket. Exception should have been thrown."); fail("Unexpected closing bracket. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(((key11 rs 234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " + expr = "(((key11 rs 234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " +
"AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " +
@ -315,7 +315,8 @@ public void testMetricFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Improper compare op. Exception should have been thrown."); fail("Improper compare op. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(((key11 ne 234 PI key12 gt 3 OR key13 lt 24 OR key14 le 456 " + expr = "(((key11 ne 234 PI key12 gt 3 OR key13 lt 24 OR key14 le 456 " +
"AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " +
@ -323,7 +324,8 @@ public void testMetricFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Improper op. Exception should have been thrown."); fail("Improper op. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(((key11 ne 234 PI key12 gt 3 OR key13 lt 24 OR key14 le 456 " + expr = "(((key11 ne 234 PI key12 gt 3 OR key13 lt 24 OR key14 le 456 " +
"AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " +
@ -331,32 +333,36 @@ public void testMetricFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Improper op. Exception should have been thrown."); fail("Improper op. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(key11 ne 234 AND key12 gt 3)) OR (key13 lt 24 OR key14 le 456)"; expr = "(key11 ne 234 AND key12 gt 3)) OR (key13 lt 24 OR key14 le 456)";
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Unbalanced brackets. Exception should have been thrown."); fail("Unbalanced brackets. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(key11 rne 234 AND key12 gt 3) OR (key13 lt 24 OR key14 le 456)"; expr = "(key11 rne 234 AND key12 gt 3) OR (key13 lt 24 OR key14 le 456)";
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Invalid compareop. Exception should have been thrown."); fail("Invalid compareop. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
expr = "(key11 ne 234 AND key12 gt 3) OR (key13 lt 24 OR key14 le"; expr = "(key11 ne 234 AND key12 gt 3) OR (key13 lt 24 OR key14 le";
try { try {
TimelineReaderWebServicesUtils.parseMetricFilters(expr); TimelineReaderWebServicesUtils.parseMetricFilters(expr);
fail("Compareop can't be parsed. Exception should have been thrown."); fail("Compareop can't be parsed. Exception should have been thrown.");
} catch (TimelineParseException e) {} } catch (TimelineParseException e) {
}
assertNull(TimelineReaderWebServicesUtils.parseMetricFilters(null)); assertNull(TimelineReaderWebServicesUtils.parseMetricFilters(null));
assertNull(TimelineReaderWebServicesUtils.parseMetricFilters(" ")); assertNull(TimelineReaderWebServicesUtils.parseMetricFilters(" "));
} }
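The negative cases above keep the JUnit 4-era try { ... fail(...) } catch (TimelineParseException e) { } pattern, only reformatted. JUnit 5 also offers assertThrows as a more compact alternative; a minimal sketch under that assumption, using a hypothetical parse helper rather than TimelineReaderWebServicesUtils:

import static org.junit.jupiter.api.Assertions.assertThrows;

public class AssertThrowsSketch {
  // Hypothetical stand-in for a parser that rejects malformed input.
  static int parse(String expr) {
    return Integer.parseInt(expr);
  }

  public static void main(String[] args) {
    // assertThrows fails if nothing (or the wrong exception type) is thrown,
    // and returns the caught exception for further inspection.
    NumberFormatException e =
        assertThrows(NumberFormatException.class, () -> parse("not a number"));
    System.out.println("rejected as expected: " + e.getMessage());
  }
}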
@Test @Test
public void testConfigFiltersParsing() throws Exception { void testConfigFiltersParsing() throws Exception {
String expr = "(((key11 ne 234 AND key12 eq val12) AND " + String expr = "(((key11 ne 234 AND key12 eq val12) AND " +
"(key13 ene val13 OR key14 eq 567)) OR (key21 eq val_21 OR key22 eq " + "(key13 ene val13 OR key14 eq 567)) OR (key21 eq val_21 OR key22 eq " +
"val.22))"; "val.22))";
@ -412,7 +418,7 @@ public void testConfigFiltersParsing() throws Exception {
parseKVFilters(expr, true), expectedList); parseKVFilters(expr, true), expectedList);
// Test with unnecessary spaces. // Test with unnecessary spaces.
expr = " abc ne 234 AND def eq 23 OR rst ene "+ expr = " abc ne 234 AND def eq 23 OR rst ene " +
" 24 OR xyz eq 456 AND pqr eq 2 "; " 24 OR xyz eq 456 AND pqr eq 2 ";
expectedList = new TimelineFilterList( expectedList = new TimelineFilterList(
new TimelineFilterList( new TimelineFilterList(
@ -439,10 +445,12 @@ public void testConfigFiltersParsing() throws Exception {
TimelineReaderWebServicesUtils.parseKVFilters(expr, true); TimelineReaderWebServicesUtils.parseKVFilters(expr, true);
fail("Invalid compareop specified for config filters. Should be either" + fail("Invalid compareop specified for config filters. Should be either" +
" eq,ne or ene and exception should have been thrown."); " eq,ne or ene and exception should have been thrown.");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
} }
@Test @Test
public void testInfoFiltersParsing() throws Exception { void testInfoFiltersParsing() throws Exception {
String expr = "(((key11 ne 234 AND key12 eq val12) AND " + String expr = "(((key11 ne 234 AND key12 eq val12) AND " +
"(key13 ene val13 OR key14 eq 567)) OR (key21 eq val_21 OR key22 eq " + "(key13 ene val13 OR key14 eq 567)) OR (key21 eq val_21 OR key22 eq " +
"5.0))"; "5.0))";
@ -499,7 +507,7 @@ public void testInfoFiltersParsing() throws Exception {
parseKVFilters(expr, false), expectedList); parseKVFilters(expr, false), expectedList);
// Test with unnecessary spaces. // Test with unnecessary spaces.
expr = " abc ne 234 AND def eq 23 OR rst ene "+ expr = " abc ne 234 AND def eq 23 OR rst ene " +
" 24 OR xyz eq 456 AND pqr eq 2 "; " 24 OR xyz eq 456 AND pqr eq 2 ";
expectedList = new TimelineFilterList( expectedList = new TimelineFilterList(
new TimelineFilterList( new TimelineFilterList(
@ -524,7 +532,7 @@ public void testInfoFiltersParsing() throws Exception {
expr = "abdeq"; expr = "abdeq";
try { try {
TimelineReaderWebServicesUtils.parseKVFilters(expr, false); TimelineReaderWebServicesUtils.parseKVFilters(expr, false);
Assert.fail("Expression evaluation should throw exception."); fail("Expression evaluation should throw exception.");
} catch (TimelineParseException e) { } catch (TimelineParseException e) {
// expected: do nothing // expected: do nothing
} }
@ -532,7 +540,7 @@ public void testInfoFiltersParsing() throws Exception {
expr = "abc gt 234 AND defeq"; expr = "abc gt 234 AND defeq";
try { try {
TimelineReaderWebServicesUtils.parseKVFilters(expr, false); TimelineReaderWebServicesUtils.parseKVFilters(expr, false);
Assert.fail("Expression evaluation should throw exception."); fail("Expression evaluation should throw exception.");
} catch (TimelineParseException e) { } catch (TimelineParseException e) {
// expected: do nothing // expected: do nothing
} }
@ -540,14 +548,14 @@ public void testInfoFiltersParsing() throws Exception {
expr = "((key11 ne 234 AND key12 eq val12) AND (key13eq OR key14 eq va14))"; expr = "((key11 ne 234 AND key12 eq val12) AND (key13eq OR key14 eq va14))";
try { try {
TimelineReaderWebServicesUtils.parseKVFilters(expr, false); TimelineReaderWebServicesUtils.parseKVFilters(expr, false);
Assert.fail("Expression evaluation should throw exception."); fail("Expression evaluation should throw exception.");
} catch (TimelineParseException e) { } catch (TimelineParseException e) {
// expected: do nothing // expected: do nothing
} }
} }
@Test @Test
public void testEventFiltersParsing() throws Exception { void testEventFiltersParsing() throws Exception {
String expr = "abc,def"; String expr = "abc,def";
TimelineFilterList expectedList = new TimelineFilterList( TimelineFilterList expectedList = new TimelineFilterList(
new TimelineExistsFilter(TimelineCompareOp.EQUAL, "abc"), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "abc"),
@ -641,85 +649,96 @@ public void testEventFiltersParsing() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Improper brackets. Exception should have been thrown"); fail("Improper brackets. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "(((!(abc,def,uvc) (OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + expr = "(((!(abc,def,uvc) (OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" +
" OR ((bcd,tyu) AND uvb))"; " OR ((bcd,tyu) AND uvb))";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unexpected opening bracket. Exception should have been thrown"); fail("Unexpected opening bracket. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "(((!(abc,def,uvc) OR) (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + expr = "(((!(abc,def,uvc) OR) (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" +
" OR ((bcd,tyu) AND uvb))"; " OR ((bcd,tyu) AND uvb))";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unexpected closing bracket. Exception should have been thrown"); fail("Unexpected closing bracket. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "(((!(abc,def,uvc) PI (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + expr = "(((!(abc,def,uvc) PI (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" +
" OR ((bcd,tyu) AND uvb))"; " OR ((bcd,tyu) AND uvb))";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Invalid op. Exception should have been thrown"); fail("Invalid op. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "(((!(abc,def,uvc) !OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + expr = "(((!(abc,def,uvc) !OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" +
" OR ((bcd,tyu) AND uvb))"; " OR ((bcd,tyu) AND uvb))";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unexpected ! char. Exception should have been thrown"); fail("Unexpected ! char. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "abc,def,uvc) OR (rst, uvx)"; expr = "abc,def,uvc) OR (rst, uvx)";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unexpected closing bracket. Exception should have been thrown"); fail("Unexpected closing bracket. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "abc,def,uvc OR )rst, uvx)"; expr = "abc,def,uvc OR )rst, uvx)";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unexpected closing bracket. Exception should have been thrown"); fail("Unexpected closing bracket. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "abc,def,uvc OR ,rst, uvx)"; expr = "abc,def,uvc OR ,rst, uvx)";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unexpected delimiter. Exception should have been thrown"); fail("Unexpected delimiter. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "abc,def,uvc OR ! "; expr = "abc,def,uvc OR ! ";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unexpected not char. Exception should have been thrown"); fail("Unexpected not char. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "(abc,def,uvc)) OR (rst, uvx)"; expr = "(abc,def,uvc)) OR (rst, uvx)";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("Unbalanced brackets. Exception should have been thrown"); fail("Unbalanced brackets. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "(((! ,(abc,def,uvc) OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu" + expr = "(((! ,(abc,def,uvc) OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu" +
"))) OR ((bcd,tyu) AND uvb))"; "))) OR ((bcd,tyu) AND uvb))";
try { try {
TimelineReaderWebServicesUtils.parseEventFilters(expr); TimelineReaderWebServicesUtils.parseEventFilters(expr);
fail("( should follow ! char. Exception should have been thrown"); fail("( should follow ! char. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
assertNull(TimelineReaderWebServicesUtils.parseEventFilters(null)); assertNull(TimelineReaderWebServicesUtils.parseEventFilters(null));
assertNull(TimelineReaderWebServicesUtils.parseEventFilters(" ")); assertNull(TimelineReaderWebServicesUtils.parseEventFilters(" "));
} }
@Test @Test
public void testRelationFiltersParsing() throws Exception { void testRelationFiltersParsing() throws Exception {
String expr = "type1:entity11,type2:entity21:entity22"; String expr = "type1:entity11,type2:entity21:entity22";
TimelineFilterList expectedList = new TimelineFilterList( TimelineFilterList expectedList = new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type1", Sets.newHashSet((Object)"entity11")), "type1", Sets.newHashSet((Object) "entity11")),
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type2", Sets.newHashSet((Object)"entity21", "entity22")) "type2", Sets.newHashSet((Object) "entity21", "entity22"))
); );
verifyFilterList(expr, TimelineReaderWebServicesUtils. verifyFilterList(expr, TimelineReaderWebServicesUtils.
parseRelationFilters(expr), expectedList); parseRelationFilters(expr), expectedList);
@ -733,16 +752,16 @@ public void testRelationFiltersParsing() throws Exception {
expectedList = new TimelineFilterList(Operator.OR, expectedList = new TimelineFilterList(Operator.OR,
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type1", Sets.newHashSet((Object)"entity11")), "type1", Sets.newHashSet((Object) "entity11")),
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type2", Sets.newHashSet((Object)"entity21", "entity22")) "type2", Sets.newHashSet((Object) "entity21", "entity22"))
), ),
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type3", Sets.newHashSet( "type3", Sets.newHashSet(
(Object)"entity31", "entity32", "entity33")), (Object) "entity31", "entity32", "entity33")),
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type1", Sets.newHashSet((Object)"entity11", "entity12")) "type1", Sets.newHashSet((Object) "entity11", "entity12"))
) )
); );
verifyFilterList(expr, TimelineReaderWebServicesUtils. verifyFilterList(expr, TimelineReaderWebServicesUtils.
@ -754,25 +773,25 @@ public void testRelationFiltersParsing() throws Exception {
expectedList = new TimelineFilterList(Operator.OR, expectedList = new TimelineFilterList(Operator.OR,
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type1", Sets.newHashSet((Object)"entity11")), "type1", Sets.newHashSet((Object) "entity11")),
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type2", Sets.newHashSet((Object)"entity21", "entity22")), "type2", Sets.newHashSet((Object) "entity21", "entity22")),
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type5", Sets.newHashSet((Object)"entity51")) "type5", Sets.newHashSet((Object) "entity51"))
), ),
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type3", Sets.newHashSet( "type3", Sets.newHashSet(
(Object)"entity31", "entity32", "entity33")), (Object) "entity31", "entity32", "entity33")),
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type1", Sets.newHashSet((Object)"entity11", "entity12")) "type1", Sets.newHashSet((Object) "entity11", "entity12"))
) )
); );
verifyFilterList(expr, TimelineReaderWebServicesUtils. verifyFilterList(expr, TimelineReaderWebServicesUtils.
parseRelationFilters(expr), expectedList); parseRelationFilters(expr), expectedList);
expr = "(((!(type1:entity11,type2:entity21:entity22,type5:entity51) OR " + expr = "(((!(type1:entity11,type2:entity21:entity22,type5:entity51) OR " +
"(type3:entity31:entity32:entity33,type1:entity11:entity12)) AND "+ "(type3:entity31:entity32:entity33,type1:entity11:entity12)) AND " +
"(!(type11:entity111) OR !(type4:entity43:entity44:entity47:entity49," + "(!(type11:entity111) OR !(type4:entity43:entity44:entity47:entity49," +
"type7:entity71))) OR ((type2:entity2,type8:entity88) AND t9:e:e1))"; "type7:entity71))) OR ((type2:entity2,type8:entity88) AND t9:e:e1))";
expectedList = new TimelineFilterList(Operator.OR, expectedList = new TimelineFilterList(Operator.OR,
@ -780,45 +799,45 @@ public void testRelationFiltersParsing() throws Exception {
new TimelineFilterList(Operator.OR, new TimelineFilterList(Operator.OR,
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type1", Sets.newHashSet((Object)"entity11")), "type1", Sets.newHashSet((Object) "entity11")),
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type2", Sets.newHashSet( "type2", Sets.newHashSet(
(Object)"entity21", "entity22")), (Object) "entity21", "entity22")),
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type5", Sets.newHashSet((Object)"entity51")) "type5", Sets.newHashSet((Object) "entity51"))
), ),
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type3", Sets.newHashSet( "type3", Sets.newHashSet(
(Object)"entity31", "entity32", "entity33")), (Object) "entity31", "entity32", "entity33")),
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type1", Sets.newHashSet( "type1", Sets.newHashSet(
(Object)"entity11", "entity12")) (Object) "entity11", "entity12"))
) )
), ),
new TimelineFilterList(Operator.OR, new TimelineFilterList(Operator.OR,
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type11", Sets.newHashSet((Object)"entity111")) "type11", Sets.newHashSet((Object) "entity111"))
), ),
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type4", Sets.newHashSet((Object)"entity43", "entity44", "type4", Sets.newHashSet((Object) "entity43", "entity44",
"entity47", "entity49")), "entity47", "entity49")),
new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
"type7", Sets.newHashSet((Object)"entity71")) "type7", Sets.newHashSet((Object) "entity71"))
) )
) )
), ),
new TimelineFilterList( new TimelineFilterList(
new TimelineFilterList( new TimelineFilterList(
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type2", Sets.newHashSet((Object)"entity2")), "type2", Sets.newHashSet((Object) "entity2")),
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
"type8", Sets.newHashSet((Object)"entity88")) "type8", Sets.newHashSet((Object) "entity88"))
), ),
new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "t9", new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "t9",
Sets.newHashSet((Object)"e", "e1")) Sets.newHashSet((Object) "e", "e1"))
) )
); );
verifyFilterList(expr, TimelineReaderWebServicesUtils. verifyFilterList(expr, TimelineReaderWebServicesUtils.
@ -834,18 +853,19 @@ public void testRelationFiltersParsing() throws Exception {
parseRelationFilters(expr), expectedList); parseRelationFilters(expr), expectedList);
expr = "(((!(type1 : entity11,type2:entity21:entity22,type5:entity51) OR " + expr = "(((!(type1 : entity11,type2:entity21:entity22,type5:entity51) OR " +
"(type3:entity31:entity32:entity33,type1:entity11:entity12)) AND "+ "(type3:entity31:entity32:entity33,type1:entity11:entity12)) AND " +
"(!(type11:entity111) OR !(type4:entity43:entity44:entity47:entity49," + "(!(type11:entity111) OR !(type4:entity43:entity44:entity47:entity49," +
"type7:entity71))) OR ((type2:entity2,type8:entity88) AND t9:e:e1))"; "type7:entity71))) OR ((type2:entity2,type8:entity88) AND t9:e:e1))";
try { try {
TimelineReaderWebServicesUtils.parseRelationFilters(expr); TimelineReaderWebServicesUtils.parseRelationFilters(expr);
fail("Space not allowed in relation expression. Exception should have " + fail("Space not allowed in relation expression. Exception should have " +
"been thrown"); "been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
} }
@Test @Test
public void testDataToRetrieve() throws Exception { void testDataToRetrieve() throws Exception {
String expr = "abc,def"; String expr = "abc,def";
TimelineFilterList expectedList = new TimelineFilterList(Operator.OR, TimelineFilterList expectedList = new TimelineFilterList(Operator.OR,
new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "abc"), new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "abc"),
@ -913,28 +933,32 @@ public void testDataToRetrieve() throws Exception {
try { try {
TimelineReaderWebServicesUtils.parseDataToRetrieve(expr); TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
fail("No closing bracket. Exception should have been thrown"); fail("No closing bracket. Exception should have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "!abc,def,xyz"; expr = "!abc,def,xyz";
try { try {
TimelineReaderWebServicesUtils.parseDataToRetrieve(expr); TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
fail("NOT(!) should be followed by opening bracket. Exception should " + fail("NOT(!) should be followed by opening bracket. Exception should " +
"have been thrown"); "have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "!abc,def,xyz"; expr = "!abc,def,xyz";
try { try {
TimelineReaderWebServicesUtils.parseDataToRetrieve(expr); TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
fail("NOT(!) should be followed by opening bracket. Exception should " + fail("NOT(!) should be followed by opening bracket. Exception should " +
"have been thrown"); "have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
expr = "! r( abc,def,xyz)"; expr = "! r( abc,def,xyz)";
try { try {
TimelineReaderWebServicesUtils.parseDataToRetrieve(expr); TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
fail("NOT(!) should be followed by opening bracket. Exception should " + fail("NOT(!) should be followed by opening bracket. Exception should " +
"have been thrown"); "have been thrown");
} catch (TimelineParseException e){} } catch (TimelineParseException e) {
}
assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(null)); assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(null));
assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(" ")); assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(" "));
View File
@ -18,12 +18,6 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.IOException; import java.io.IOException;
import java.security.Principal; import java.security.Principal;
import java.security.PrivilegedExceptionAction; import java.security.PrivilegedExceptionAction;
@ -31,18 +25,24 @@
import java.util.Enumeration; import java.util.Enumeration;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import javax.servlet.FilterConfig; import javax.servlet.FilterConfig;
import javax.servlet.ServletContext; import javax.servlet.ServletContext;
import javax.servlet.ServletException; import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.reader.security.TimelineReaderWhitelistAuthorizationFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.security.TimelineReaderWhitelistAuthorizationFilter;
import org.junit.Test;
import org.mockito.Mockito; import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/** /**
* Unit tests for {@link TimelineReaderWhitelistAuthorizationFilter}. * Unit tests for {@link TimelineReaderWhitelistAuthorizationFilter}.
@ -85,7 +85,7 @@ public ServletContext getServletContext() {
} }
@Test @Test
public void checkFilterAllowedUser() throws ServletException, IOException { void checkFilterAllowedUser() throws ServletException, IOException {
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true"); map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true");
map.put(YarnConfiguration.TIMELINE_SERVICE_READ_ALLOWED_USERS, map.put(YarnConfiguration.TIMELINE_SERVICE_READ_ALLOWED_USERS,
@ -111,7 +111,7 @@ public String getName() {
} }
@Test @Test
public void checkFilterNotAllowedUser() throws ServletException, IOException { void checkFilterNotAllowedUser() throws ServletException, IOException {
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true"); map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true");
map.put(YarnConfiguration.TIMELINE_SERVICE_READ_ALLOWED_USERS, map.put(YarnConfiguration.TIMELINE_SERVICE_READ_ALLOWED_USERS,
@ -138,7 +138,7 @@ public String getName() {
} }
@Test @Test
public void checkFilterAllowedUserGroup() void checkFilterAllowedUserGroup()
throws ServletException, IOException, InterruptedException { throws ServletException, IOException, InterruptedException {
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true"); map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true");
@ -172,7 +172,7 @@ public Object run() throws Exception {
} }
@Test @Test
public void checkFilterNotAlloweGroup() void checkFilterNotAlloweGroup()
throws ServletException, IOException, InterruptedException { throws ServletException, IOException, InterruptedException {
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true"); map.put(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, "true");
@ -207,7 +207,7 @@ public Object run() throws Exception {
} }
@Test @Test
public void checkFilterAllowAdmins() void checkFilterAllowAdmins()
throws ServletException, IOException, InterruptedException { throws ServletException, IOException, InterruptedException {
// check that users in admin acl list are allowed to read // check that users in admin acl list are allowed to read
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
@ -243,7 +243,7 @@ public Object run() throws Exception {
} }
@Test @Test
public void checkFilterAllowAdminsWhenNoUsersSet() void checkFilterAllowAdminsWhenNoUsersSet()
throws ServletException, IOException, InterruptedException { throws ServletException, IOException, InterruptedException {
// check that users in admin acl list are allowed to read // check that users in admin acl list are allowed to read
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
@ -277,7 +277,7 @@ public Object run() throws Exception {
} }
@Test @Test
public void checkFilterAllowNoOneWhenAdminAclsEmptyAndUserAclsEmpty() void checkFilterAllowNoOneWhenAdminAclsEmptyAndUserAclsEmpty()
throws ServletException, IOException, InterruptedException { throws ServletException, IOException, InterruptedException {
// check that users in admin acl list are allowed to read // check that users in admin acl list are allowed to read
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
@ -311,7 +311,7 @@ public Object run() throws Exception {
} }
@Test @Test
public void checkFilterReadAuthDisabledNoAclSettings() void checkFilterReadAuthDisabledNoAclSettings()
throws ServletException, IOException, InterruptedException { throws ServletException, IOException, InterruptedException {
// Default settings for Read Auth Enabled (false) // Default settings for Read Auth Enabled (false)
// No values in admin acls or allowed read user list // No values in admin acls or allowed read user list
@ -344,7 +344,7 @@ public Object run() throws Exception {
} }
@Test @Test
public void checkFilterReadAuthDisabledButAclSettingsPopulated() void checkFilterReadAuthDisabledButAclSettingsPopulated()
throws ServletException, IOException, InterruptedException { throws ServletException, IOException, InterruptedException {
Map<String, String> map = new HashMap<String, String>(); Map<String, String> map = new HashMap<String, String>();
// Default settings for Read Auth Enabled (false) // Default settings for Read Auth Enabled (false)
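Note (illustrative sketch, not from the commit): the change applied throughout the whitelist-filter tests above is purely mechanical. org.junit.Test is swapped for org.junit.jupiter.api.Test, and test classes and methods drop the public modifier because JUnit 5 discovers package-private members. A minimal skeleton of the before/after shape, with hypothetical names:

// JUnit 4:
//   import org.junit.Test;
//   @Test
//   public void checkFilterAllowedUser() throws ServletException, IOException { ... }

// JUnit 5:
import org.junit.jupiter.api.Test;

class VisibilityMigrationSketch {

  @Test
  void checkFilterAllowedUser() {
    // package-private class and method are enough for JUnit 5 discovery;
    // the Mockito-based test body is unchanged by the migration.
  }
}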
View File
@ -18,16 +18,16 @@
package org.apache.hadoop.yarn.server.timelineservice.reader; package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertEquals; import org.junit.jupiter.api.Test;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import org.junit.Test; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
public class TestTimelineUIDConverter { public class TestTimelineUIDConverter {
@Test @Test
public void testUIDEncodingDecoding() throws Exception { void testUIDEncodingDecoding() throws Exception {
TimelineReaderContext context = new TimelineReaderContext( TimelineReaderContext context = new TimelineReaderContext(
"!cluster", "!b*o*!xer", "oozie*", null, null, null, null); "!cluster", "!b*o*!xer", "oozie*", null, null, null, null);
String uid = TimelineUIDConverter.FLOW_UID.encodeUID(context); String uid = TimelineUIDConverter.FLOW_UID.encodeUID(context);
@ -80,7 +80,7 @@ public void testUIDEncodingDecoding() throws Exception {
} }
@Test @Test
public void testUIDNotProperlyEscaped() throws Exception { void testUIDNotProperlyEscaped() throws Exception {
try { try {
TimelineUIDConverter.FLOW_UID.decodeUID("*!cluster!*!b*o***!xer!oozie**"); TimelineUIDConverter.FLOW_UID.decodeUID("*!cluster!*!b*o***!xer!oozie**");
fail("UID not properly escaped. Exception should have been thrown."); fail("UID not properly escaped. Exception should have been thrown.");
View File
@ -30,6 +30,11 @@
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter; import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
@ -53,11 +58,9 @@
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils; import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.junit.AfterClass;
import org.junit.Assert; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.Before; import static org.junit.jupiter.api.Assertions.fail;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestFileSystemTimelineReaderImpl { public class TestFileSystemTimelineReaderImpl {
@ -65,7 +68,7 @@ public class TestFileSystemTimelineReaderImpl {
TestFileSystemTimelineReaderImpl.class.getSimpleName()).getAbsolutePath(); TestFileSystemTimelineReaderImpl.class.getSimpleName()).getAbsolutePath();
private FileSystemTimelineReaderImpl reader; private FileSystemTimelineReaderImpl reader;
@BeforeClass @BeforeAll
public static void setup() throws Exception { public static void setup() throws Exception {
initializeDataDirectory(ROOT_DIR); initializeDataDirectory(ROOT_DIR);
} }
@ -89,12 +92,12 @@ public static void initializeDataDirectory(String rootDir) throws Exception {
(new File(rootDir)).deleteOnExit(); (new File(rootDir)).deleteOnExit();
} }
@AfterClass @AfterAll
public static void tearDown() throws Exception { public static void tearDown() throws Exception {
FileUtils.deleteDirectory(new File(ROOT_DIR)); FileUtils.deleteDirectory(new File(ROOT_DIR));
} }
@Before @BeforeEach
public void init() throws Exception { public void init() throws Exception {
reader = new FileSystemTimelineReaderImpl(); reader = new FileSystemTimelineReaderImpl();
Configuration conf = new YarnConfiguration(); Configuration conf = new YarnConfiguration();
@ -313,141 +316,141 @@ private static File getAppDir(String rootDir, String cluster, String user,
} }
@Test @Test
public void testGetEntityDefaultView() throws Exception { void testGetEntityDefaultView() throws Exception {
// If no fields are specified, entity is returned with default view i.e. // If no fields are specified, entity is returned with default view i.e.
// only the id, type and created time. // only the id, type and created time.
TimelineEntity result = reader.getEntity( TimelineEntity result = reader.getEntity(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", "id_1"), "app", "id_1"),
new TimelineDataToRetrieve(null, null, null, null, null, null)); new TimelineDataToRetrieve(null, null, null, null, null, null));
Assert.assertEquals( assertEquals(
(new TimelineEntity.Identifier("app", "id_1")).toString(), (new TimelineEntity.Identifier("app", "id_1")).toString(),
result.getIdentifier().toString()); result.getIdentifier().toString());
Assert.assertEquals((Long)1425016502000L, result.getCreatedTime()); assertEquals((Long) 1425016502000L, result.getCreatedTime());
Assert.assertEquals(0, result.getConfigs().size()); assertEquals(0, result.getConfigs().size());
Assert.assertEquals(0, result.getMetrics().size()); assertEquals(0, result.getMetrics().size());
} }
@Test @Test
public void testGetEntityByClusterAndApp() throws Exception { void testGetEntityByClusterAndApp() throws Exception {
// Cluster and AppId should be enough to get an entity. // Cluster and AppId should be enough to get an entity.
TimelineEntity result = reader.getEntity( TimelineEntity result = reader.getEntity(
new TimelineReaderContext("cluster1", null, null, null, "app1", "app", new TimelineReaderContext("cluster1", null, null, null, "app1", "app",
"id_1"), "id_1"),
new TimelineDataToRetrieve(null, null, null, null, null, null)); new TimelineDataToRetrieve(null, null, null, null, null, null));
Assert.assertEquals( assertEquals(
(new TimelineEntity.Identifier("app", "id_1")).toString(), (new TimelineEntity.Identifier("app", "id_1")).toString(),
result.getIdentifier().toString()); result.getIdentifier().toString());
Assert.assertEquals((Long)1425016502000L, result.getCreatedTime()); assertEquals((Long) 1425016502000L, result.getCreatedTime());
Assert.assertEquals(0, result.getConfigs().size()); assertEquals(0, result.getConfigs().size());
Assert.assertEquals(0, result.getMetrics().size()); assertEquals(0, result.getMetrics().size());
} }
/** This test checks whether we can handle commas in app flow mapping csv. */ /** This test checks whether we can handle commas in app flow mapping csv. */
@Test @Test
public void testAppFlowMappingCsv() throws Exception { void testAppFlowMappingCsv() throws Exception {
// Test getting an entity by cluster and app where flow entry // Test getting an entity by cluster and app where flow entry
// in app flow mapping csv has commas. // in app flow mapping csv has commas.
TimelineEntity result = reader.getEntity( TimelineEntity result = reader.getEntity(
new TimelineReaderContext("cluster1", null, null, null, "app2", new TimelineReaderContext("cluster1", null, null, null, "app2",
"app", "id_5"), "app", "id_5"),
new TimelineDataToRetrieve(null, null, null, null, null, null)); new TimelineDataToRetrieve(null, null, null, null, null, null));
Assert.assertEquals( assertEquals(
(new TimelineEntity.Identifier("app", "id_5")).toString(), (new TimelineEntity.Identifier("app", "id_5")).toString(),
result.getIdentifier().toString()); result.getIdentifier().toString());
Assert.assertEquals((Long)1425016502050L, result.getCreatedTime()); assertEquals((Long) 1425016502050L, result.getCreatedTime());
} }
@Test @Test
public void testGetEntityCustomFields() throws Exception { void testGetEntityCustomFields() throws Exception {
// Specified fields in addition to default view will be returned. // Specified fields in addition to default view will be returned.
TimelineEntity result = reader.getEntity( TimelineEntity result = reader.getEntity(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", "id_1"), "app", "id_1"),
new TimelineDataToRetrieve(null, null, new TimelineDataToRetrieve(null, null,
EnumSet.of(Field.INFO, Field.CONFIGS, Field.METRICS), null, null, EnumSet.of(Field.INFO, Field.CONFIGS, Field.METRICS), null, null,
null)); null));
Assert.assertEquals( assertEquals(
(new TimelineEntity.Identifier("app", "id_1")).toString(), (new TimelineEntity.Identifier("app", "id_1")).toString(),
result.getIdentifier().toString()); result.getIdentifier().toString());
Assert.assertEquals((Long)1425016502000L, result.getCreatedTime()); assertEquals((Long) 1425016502000L, result.getCreatedTime());
Assert.assertEquals(3, result.getConfigs().size()); assertEquals(3, result.getConfigs().size());
Assert.assertEquals(3, result.getMetrics().size()); assertEquals(3, result.getMetrics().size());
Assert.assertEquals(2, result.getInfo().size()); assertEquals(2, result.getInfo().size());
// No events will be returned // No events will be returned
Assert.assertEquals(0, result.getEvents().size()); assertEquals(0, result.getEvents().size());
} }
@Test @Test
public void testGetEntityAllFields() throws Exception { void testGetEntityAllFields() throws Exception {
// All fields of TimelineEntity will be returned. // All fields of TimelineEntity will be returned.
TimelineEntity result = reader.getEntity( TimelineEntity result = reader.getEntity(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", "id_1"), "app", "id_1"),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null, new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null,
null, null)); null, null));
Assert.assertEquals( assertEquals(
(new TimelineEntity.Identifier("app", "id_1")).toString(), (new TimelineEntity.Identifier("app", "id_1")).toString(),
result.getIdentifier().toString()); result.getIdentifier().toString());
Assert.assertEquals((Long)1425016502000L, result.getCreatedTime()); assertEquals((Long) 1425016502000L, result.getCreatedTime());
Assert.assertEquals(3, result.getConfigs().size()); assertEquals(3, result.getConfigs().size());
Assert.assertEquals(3, result.getMetrics().size()); assertEquals(3, result.getMetrics().size());
// All fields including events will be returned. // All fields including events will be returned.
Assert.assertEquals(2, result.getEvents().size()); assertEquals(2, result.getEvents().size());
} }
@Test @Test
public void testGetAllEntities() throws Exception { void testGetAllEntities() throws Exception {
Set<TimelineEntity> result = reader.getEntities( Set<TimelineEntity> result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), new TimelineEntityFilters.Builder().build(), "app", null), new TimelineEntityFilters.Builder().build(),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null, new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null,
null, null)); null, null));
// All 4 entities will be returned // All 4 entities will be returned
Assert.assertEquals(4, result.size()); assertEquals(4, result.size());
} }
@Test @Test
public void testGetEntitiesWithLimit() throws Exception { void testGetEntitiesWithLimit() throws Exception {
Set<TimelineEntity> result = reader.getEntities( Set<TimelineEntity> result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().entityLimit(2L).build(), new TimelineEntityFilters.Builder().entityLimit(2L).build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
// Needs to be rewritten once hashcode and equals for // Needs to be rewritten once hashcode and equals for
// TimelineEntity is implemented // TimelineEntity is implemented
// Entities with id_1 and id_4 should be returned, // Entities with id_1 and id_4 should be returned,
// based on created time, descending. // based on created time, descending.
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_4")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_4")) {
Assert.fail("Entity not sorted by created time"); fail("Entity not sorted by created time");
} }
} }
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().entityLimit(3L).build(), new TimelineEntityFilters.Builder().entityLimit(3L).build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
// Even though 2 entities out of 4 have same created time, one entity // Even though 2 entities out of 4 have same created time, one entity
// is left out due to limit // is left out due to limit
Assert.assertEquals(3, result.size()); assertEquals(3, result.size());
} }
@Test @Test
public void testGetEntitiesByTimeWindows() throws Exception { void testGetEntitiesByTimeWindows() throws Exception {
// Get entities based on created time start and end time range. // Get entities based on created time start and end time range.
Set<TimelineEntity> result = reader.getEntities( Set<TimelineEntity> result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().createdTimeBegin(1425016502030L) new TimelineEntityFilters.Builder().createdTimeBegin(1425016502030L)
.createTimeEnd(1425016502060L).build(), .createTimeEnd(1425016502060L).build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
// Only one entity with ID id_4 should be returned. // Only one entity with ID id_4 should be returned.
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_4")) { if (!entity.getId().equals("id_4")) {
Assert.fail("Incorrect filtering based on created time range"); fail("Incorrect filtering based on created time range");
} }
} }
@ -458,44 +461,44 @@ public void testGetEntitiesByTimeWindows() throws Exception {
new TimelineEntityFilters.Builder().createTimeEnd(1425016502010L) new TimelineEntityFilters.Builder().createTimeEnd(1425016502010L)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(3, result.size()); assertEquals(3, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (entity.getId().equals("id_4")) { if (entity.getId().equals("id_4")) {
Assert.fail("Incorrect filtering based on created time range"); fail("Incorrect filtering based on created time range");
} }
} }
// Get entities if only created time start is specified. // Get entities if only created time start is specified.
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().createdTimeBegin(1425016502010L) new TimelineEntityFilters.Builder().createdTimeBegin(1425016502010L)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_4")) { if (!entity.getId().equals("id_4")) {
Assert.fail("Incorrect filtering based on created time range"); fail("Incorrect filtering based on created time range");
} }
} }
} }
@Test @Test
public void testGetFilteredEntities() throws Exception { void testGetFilteredEntities() throws Exception {
// Get entities based on info filters. // Get entities based on info filters.
TimelineFilterList infoFilterList = new TimelineFilterList(); TimelineFilterList infoFilterList = new TimelineFilterList();
infoFilterList.addFilter( infoFilterList.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5)); new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
Set<TimelineEntity> result = reader.getEntities( Set<TimelineEntity> result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList).build(), new TimelineEntityFilters.Builder().infoFilters(infoFilterList).build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
// Only one entity with ID id_3 should be returned. // Only one entity with ID id_3 should be returned.
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_3")) { if (!entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on info filters"); fail("Incorrect filtering based on info filters");
} }
} }
@ -507,14 +510,14 @@ public void testGetFilteredEntities() throws Exception {
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc")); new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList) new TimelineEntityFilters.Builder().configFilters(confFilterList)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_3")) { if (!entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on config filters"); fail("Incorrect filtering based on config filters");
} }
} }
@ -526,13 +529,13 @@ public void testGetFilteredEntities() throws Exception {
new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4")); new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().eventFilters(eventFilters).build(), new TimelineEntityFilters.Builder().eventFilters(eventFilters).build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_3")) { if (!entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on event filters"); fail("Incorrect filtering based on event filters");
} }
} }
@ -542,15 +545,15 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L)); TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList) new TimelineEntityFilters.Builder().metricFilters(metricFilterList)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
// Two entities with IDs' id_1 and id_2 should be returned. // Two entities with IDs' id_1 and id_2 should be returned.
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on metric filters"); fail("Incorrect filtering based on metric filters");
} }
} }
@ -569,14 +572,14 @@ public void testGetFilteredEntities() throws Exception {
new TimelineFilterList(Operator.OR, list1, list2); new TimelineFilterList(Operator.OR, list1, list2);
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList1) new TimelineEntityFilters.Builder().configFilters(confFilterList1)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters"); fail("Incorrect filtering based on config filters");
} }
} }
@ -592,14 +595,14 @@ public void testGetFilteredEntities() throws Exception {
new TimelineFilterList(Operator.OR, list3, list4); new TimelineFilterList(Operator.OR, list3, list4);
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList2) new TimelineEntityFilters.Builder().configFilters(confFilterList2)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters"); fail("Incorrect filtering based on config filters");
} }
} }
@ -610,14 +613,14 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.NOT_EQUAL, "config_3", "abc")); TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList3) new TimelineEntityFilters.Builder().configFilters(confFilterList3)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
for(TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2")) { if (!entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters"); fail("Incorrect filtering based on config filters");
} }
} }
@ -628,11 +631,11 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.EQUAL, "config_3", "def")); TimelineCompareOp.EQUAL, "config_3", "def"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList4) new TimelineEntityFilters.Builder().configFilters(confFilterList4)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size()); assertEquals(0, result.size());
TimelineFilterList confFilterList5 = new TimelineFilterList(Operator.OR); TimelineFilterList confFilterList5 = new TimelineFilterList(Operator.OR);
confFilterList5.addFilter(new TimelineKeyValueFilter( confFilterList5.addFilter(new TimelineKeyValueFilter(
@ -641,14 +644,14 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.EQUAL, "config_3", "def")); TimelineCompareOp.EQUAL, "config_3", "def"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList5) new TimelineEntityFilters.Builder().configFilters(confFilterList5)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2")) { if (!entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters"); fail("Incorrect filtering based on config filters");
} }
} }
@ -665,15 +668,15 @@ public void testGetFilteredEntities() throws Exception {
new TimelineFilterList(Operator.OR, list6, list7); new TimelineFilterList(Operator.OR, list6, list7);
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList1) new TimelineEntityFilters.Builder().metricFilters(metricFilterList1)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
// Two entities with IDs' id_2 and id_3 should be returned. // Two entities with IDs' id_2 and id_3 should be returned.
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2") && !entity.getId().equals("id_3")) { if (!entity.getId().equals("id_2") && !entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on metric filters"); fail("Incorrect filtering based on metric filters");
} }
} }
@ -684,14 +687,14 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23)); TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList2) new TimelineEntityFilters.Builder().metricFilters(metricFilterList2)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1")) { if (!entity.getId().equals("id_1")) {
Assert.fail("Incorrect filtering based on metric filters"); fail("Incorrect filtering based on metric filters");
} }
} }
@ -702,11 +705,11 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23)); TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList3) new TimelineEntityFilters.Builder().metricFilters(metricFilterList3)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size()); assertEquals(0, result.size());
TimelineFilterList metricFilterList4 = new TimelineFilterList(Operator.OR); TimelineFilterList metricFilterList4 = new TimelineFilterList(Operator.OR);
metricFilterList4.addFilter(new TimelineCompareFilter( metricFilterList4.addFilter(new TimelineCompareFilter(
@ -715,14 +718,14 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23)); TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList4) new TimelineEntityFilters.Builder().metricFilters(metricFilterList4)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on metric filters"); fail("Incorrect filtering based on metric filters");
} }
} }
@ -731,14 +734,14 @@ public void testGetFilteredEntities() throws Exception {
TimelineCompareOp.NOT_EQUAL, "metric2", 74)); TimelineCompareOp.NOT_EQUAL, "metric2", 74));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList5) new TimelineEntityFilters.Builder().metricFilters(metricFilterList5)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on metric filters"); fail("Incorrect filtering based on metric filters");
} }
} }
@ -749,11 +752,11 @@ public void testGetFilteredEntities() throws Exception {
new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "info4", 20)); new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "info4", 20));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList1) new TimelineEntityFilters.Builder().infoFilters(infoFilterList1)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size()); assertEquals(0, result.size());
TimelineFilterList infoFilterList2 = new TimelineFilterList(Operator.OR); TimelineFilterList infoFilterList2 = new TimelineFilterList(Operator.OR);
infoFilterList2.addFilter( infoFilterList2.addFilter(
@ -762,14 +765,14 @@ public void testGetFilteredEntities() throws Exception {
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info1", "val1")); new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info1", "val1"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList2) new TimelineEntityFilters.Builder().infoFilters(infoFilterList2)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on info filters"); fail("Incorrect filtering based on info filters");
} }
} }
@ -780,11 +783,11 @@ public void testGetFilteredEntities() throws Exception {
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5")); new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList3) new TimelineEntityFilters.Builder().infoFilters(infoFilterList3)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size()); assertEquals(0, result.size());
TimelineFilterList infoFilterList4 = new TimelineFilterList(Operator.OR); TimelineFilterList infoFilterList4 = new TimelineFilterList(Operator.OR);
infoFilterList4.addFilter( infoFilterList4.addFilter(
@ -793,55 +796,55 @@ public void testGetFilteredEntities() throws Exception {
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5")); new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList4) new TimelineEntityFilters.Builder().infoFilters(infoFilterList4)
.build(), .build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1")) { if (!entity.getId().equals("id_1")) {
Assert.fail("Incorrect filtering based on info filters"); fail("Incorrect filtering based on info filters");
} }
} }
} }
@Test @Test
public void testGetEntitiesByRelations() throws Exception { void testGetEntitiesByRelations() throws Exception {
// Get entities based on relatesTo. // Get entities based on relatesTo.
TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR); TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR);
Set<Object> relatesToIds = Set<Object> relatesToIds =
new HashSet<Object>(Arrays.asList((Object)"flow1")); new HashSet<Object>(Arrays.asList((Object) "flow1"));
relatesTo.addFilter(new TimelineKeyValuesFilter( relatesTo.addFilter(new TimelineKeyValuesFilter(
TimelineCompareOp.EQUAL, "flow", relatesToIds)); TimelineCompareOp.EQUAL, "flow", relatesToIds));
Set<TimelineEntity> result = reader.getEntities( Set<TimelineEntity> result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().relatesTo(relatesTo).build(), new TimelineEntityFilters.Builder().relatesTo(relatesTo).build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size()); assertEquals(1, result.size());
// Only one entity with ID id_1 should be returned. // Only one entity with ID id_1 should be returned.
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1")) { if (!entity.getId().equals("id_1")) {
Assert.fail("Incorrect filtering based on relatesTo"); fail("Incorrect filtering based on relatesTo");
} }
} }
// Get entities based on isRelatedTo. // Get entities based on isRelatedTo.
TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR); TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR);
Set<Object> isRelatedToIds = Set<Object> isRelatedToIds =
new HashSet<Object>(Arrays.asList((Object)"tid1_2")); new HashSet<Object>(Arrays.asList((Object) "tid1_2"));
isRelatedTo.addFilter(new TimelineKeyValuesFilter( isRelatedTo.addFilter(new TimelineKeyValuesFilter(
TimelineCompareOp.EQUAL, "type1", isRelatedToIds)); TimelineCompareOp.EQUAL, "type1", isRelatedToIds));
result = reader.getEntities( result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null), "app", null),
new TimelineEntityFilters.Builder().isRelatedTo(isRelatedTo).build(), new TimelineEntityFilters.Builder().isRelatedTo(isRelatedTo).build(),
new TimelineDataToRetrieve()); new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size()); assertEquals(2, result.size());
// Two entities with IDs' id_1 and id_3 should be returned. // Two entities with IDs' id_1 and id_3 should be returned.
for (TimelineEntity entity : result) { for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) { if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on isRelatedTo"); fail("Incorrect filtering based on isRelatedTo");
} }
} }
} }
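Note (illustrative sketch, not from the commit): the top of the reader test file above also shows the lifecycle-annotation mapping used across this change: @BeforeClass becomes @BeforeAll, @AfterClass becomes @AfterAll, and @Before becomes @BeforeEach. A minimal skeleton with hypothetical names:

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class LifecycleMigrationSketch {

  @BeforeAll          // JUnit 4: @BeforeClass
  static void setup() {
    // one-time initialization; still has to be static
  }

  @BeforeEach         // JUnit 4: @Before
  void init() {
    // runs before every test method
  }

  @Test
  void example() {
  }

  @AfterAll           // JUnit 4: @AfterClass
  static void tearDown() {
    // one-time cleanup
  }
}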
View File
@ -17,8 +17,6 @@
*/ */
package org.apache.hadoop.yarn.server.timelineservice.storage; package org.apache.hadoop.yarn.server.timelineservice.storage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
@ -29,6 +27,9 @@
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -41,13 +42,14 @@
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext; import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils; import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.junit.Rule;
import org.junit.Test; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.rules.TemporaryFolder; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestFileSystemTimelineWriterImpl { public class TestFileSystemTimelineWriterImpl {
@Rule @TempDir
public TemporaryFolder tmpFolder = new TemporaryFolder(); private File tmpFolder;
/** /**
* Unit test for PoC YARN 3264. * Unit test for PoC YARN 3264.
@ -55,7 +57,7 @@ public class TestFileSystemTimelineWriterImpl {
* @throws Exception * @throws Exception
*/ */
@Test @Test
public void testWriteEntityToFile() throws Exception { void testWriteEntityToFile() throws Exception {
TimelineEntities te = new TimelineEntities(); TimelineEntities te = new TimelineEntities();
TimelineEntity entity = new TimelineEntity(); TimelineEntity entity = new TimelineEntity();
String id = "hello"; String id = "hello";
@ -89,7 +91,7 @@ public void testWriteEntityToFile() throws Exception {
try { try {
fsi = new FileSystemTimelineWriterImpl(); fsi = new FileSystemTimelineWriterImpl();
Configuration conf = new YarnConfiguration(); Configuration conf = new YarnConfiguration();
String outputRoot = tmpFolder.newFolder().getAbsolutePath(); String outputRoot = tmpFolder.getAbsolutePath();
conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT, conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
outputRoot); outputRoot);
fsi.init(conf); fsi.init(conf);
@ -107,14 +109,13 @@ public void testWriteEntityToFile() throws Exception {
FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION; FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
Path path = new Path(fileName); Path path = new Path(fileName);
FileSystem fs = FileSystem.get(conf); FileSystem fs = FileSystem.get(conf);
assertTrue("Specified path(" + fileName + ") should exist: ", assertTrue(fs.exists(path),
fs.exists(path)); "Specified path(" + fileName + ") should exist: ");
FileStatus fileStatus = fs.getFileStatus(path); FileStatus fileStatus = fs.getFileStatus(path);
assertTrue("Specified path should be a file", assertFalse(fileStatus.isDirectory(), "Specified path should be a file");
!fileStatus.isDirectory());
List<String> data = readFromFile(fs, path); List<String> data = readFromFile(fs, path);
// ensure there's only one entity + 1 new line // ensure there's only one entity + 1 new line
assertTrue("data size is:" + data.size(), data.size() == 2); assertEquals(2, data.size(), "data size is:" + data.size());
String d = data.get(0); String d = data.get(0);
// confirm the contents same as what was written // confirm the contents same as what was written
assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity)); assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity));
@ -127,14 +128,13 @@ public void testWriteEntityToFile() throws Exception {
File.separator + type2 + File.separator + id2 + File.separator + type2 + File.separator + id2 +
FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION; FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
Path path2 = new Path(fileName2); Path path2 = new Path(fileName2);
assertTrue("Specified path(" + fileName + ") should exist: ", assertTrue(fs.exists(path2),
fs.exists(path2)); "Specified path(" + fileName + ") should exist: ");
FileStatus fileStatus2 = fs.getFileStatus(path2); FileStatus fileStatus2 = fs.getFileStatus(path2);
assertTrue("Specified path should be a file", assertFalse(fileStatus2.isDirectory(), "Specified path should be a file");
!fileStatus2.isDirectory());
List<String> data2 = readFromFile(fs, path2); List<String> data2 = readFromFile(fs, path2);
// ensure there's only one entity + 1 new line // ensure there's only one entity + 1 new line
assertTrue("data size is:" + data2.size(), data2.size() == 2); assertEquals(2, data2.size(), "data size is:" + data2.size());
String metricToString = data2.get(0); String metricToString = data2.get(0);
// confirm the contents same as what was written // confirm the contents same as what was written
assertEquals(metricToString, assertEquals(metricToString,
@ -147,7 +147,7 @@ public void testWriteEntityToFile() throws Exception {
} }
@Test @Test
public void testWriteMultipleEntities() throws Exception { void testWriteMultipleEntities() throws Exception {
String id = "appId"; String id = "appId";
String type = "app"; String type = "app";
@ -169,7 +169,7 @@ public void testWriteMultipleEntities() throws Exception {
try { try {
fsi = new FileSystemTimelineWriterImpl(); fsi = new FileSystemTimelineWriterImpl();
Configuration conf = new YarnConfiguration(); Configuration conf = new YarnConfiguration();
String outputRoot = tmpFolder.newFolder().getAbsolutePath(); String outputRoot = tmpFolder.getAbsolutePath();
conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT, conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
outputRoot); outputRoot);
fsi.init(conf); fsi.init(conf);
@ -191,13 +191,12 @@ public void testWriteMultipleEntities() throws Exception {
FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION; FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
Path path = new Path(fileName); Path path = new Path(fileName);
FileSystem fs = FileSystem.get(conf); FileSystem fs = FileSystem.get(conf);
assertTrue("Specified path(" + fileName + ") should exist: ", assertTrue(fs.exists(path),
fs.exists(path)); "Specified path(" + fileName + ") should exist: ");
FileStatus fileStatus = fs.getFileStatus(path); FileStatus fileStatus = fs.getFileStatus(path);
assertTrue("Specified path should be a file", assertFalse(fileStatus.isDirectory(), "Specified path should be a file");
!fileStatus.isDirectory());
List<String> data = readFromFile(fs, path); List<String> data = readFromFile(fs, path);
assertTrue("data size is:" + data.size(), data.size() == 3); assertEquals(3, data.size(), "data size is:" + data.size());
String d = data.get(0); String d = data.get(0);
// confirm the contents same as what was written // confirm the contents same as what was written
assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity)); assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity));
@ -215,7 +214,7 @@ public void testWriteMultipleEntities() throws Exception {
} }
@Test @Test
public void testWriteEntitiesWithEmptyFlowName() throws Exception { void testWriteEntitiesWithEmptyFlowName() throws Exception {
String id = "appId"; String id = "appId";
String type = "app"; String type = "app";
@ -230,7 +229,7 @@ public void testWriteEntitiesWithEmptyFlowName() throws Exception {
try { try {
fsi = new FileSystemTimelineWriterImpl(); fsi = new FileSystemTimelineWriterImpl();
Configuration conf = new YarnConfiguration(); Configuration conf = new YarnConfiguration();
String outputRoot = tmpFolder.newFolder().getAbsolutePath(); String outputRoot = tmpFolder.getAbsolutePath();
conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT, conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
outputRoot); outputRoot);
fsi.init(conf); fsi.init(conf);
@ -248,13 +247,12 @@ public void testWriteEntitiesWithEmptyFlowName() throws Exception {
FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION; FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
Path path = new Path(fileName); Path path = new Path(fileName);
FileSystem fs = FileSystem.get(conf); FileSystem fs = FileSystem.get(conf);
assertTrue("Specified path(" + fileName + ") should exist: ", assertTrue(fs.exists(path),
fs.exists(path)); "Specified path(" + fileName + ") should exist: ");
FileStatus fileStatus = fs.getFileStatus(path); FileStatus fileStatus = fs.getFileStatus(path);
assertTrue("Specified path should be a file", assertFalse(fileStatus.isDirectory(), "specified path should be a file");
!fileStatus.isDirectory());
List<String> data = readFromFile(fs, path); List<String> data = readFromFile(fs, path);
assertTrue("data size is:" + data.size(), data.size() == 2); assertEquals(2, data.size(), "data size is:" + data.size());
String d = data.get(0); String d = data.get(0);
// confirm the contents same as what was written // confirm the contents same as what was written
assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity)); assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity));
@ -278,4 +276,5 @@ private List<String> readFromFile(FileSystem fs, Path path)
} }
return data; return data;
} }
} }
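
Two JUnit 5 idioms recur throughout the writer test above: the JUnit 4 @Rule TemporaryFolder is replaced by a field annotated with @TempDir, and assertion messages move from the first argument to the last (with assertTrue(!x) rewritten as assertFalse(x) and size checks as assertEquals). A minimal, hypothetical sketch of both patterns, not taken from this patch (TempDirSketchTest and the file name are illustrative):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class TempDirSketchTest {

  // JUnit 4: @Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
  // The injected directory already exists, so no newFolder() call is needed.
  @TempDir
  File tmpFolder;

  @Test
  void testWriteIntoTempDir() throws IOException {
    File out = new File(tmpFolder, "entity.thist");
    Files.write(out.toPath(), Arrays.asList("{\"id\":\"hello\"}", ""));

    // JUnit 5 takes the message as the last argument instead of the first.
    assertTrue(out.exists(), "Specified path(" + out + ") should exist");
    assertFalse(out.isDirectory(), "Specified path should be a file");

    List<String> data = Files.readAllLines(out.toPath());
    // One entity line plus one trailing empty line.
    assertEquals(2, data.size(), "data size is: " + data.size());
  }
}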

View File

@ -18,10 +18,12 @@
package org.apache.hadoop.yarn.server.timelineservice.storage; package org.apache.hadoop.yarn.server.timelineservice.storage;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.junit.Assert;
import org.junit.Test; import static org.junit.jupiter.api.Assertions.assertEquals;
/** /**
* Test cases for {@link TimelineSchemaCreator}. * Test cases for {@link TimelineSchemaCreator}.
@ -29,13 +31,13 @@
public class TestTimelineSchemaCreator { public class TestTimelineSchemaCreator {
@Test @Test
public void testTimelineSchemaCreation() throws Exception { void testTimelineSchemaCreation() throws Exception {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.set(YarnConfiguration.TIMELINE_SERVICE_SCHEMA_CREATOR_CLASS, conf.set(YarnConfiguration.TIMELINE_SERVICE_SCHEMA_CREATOR_CLASS,
"org.apache.hadoop.yarn.server.timelineservice.storage" + "org.apache.hadoop.yarn.server.timelineservice.storage" +
".DummyTimelineSchemaCreator"); ".DummyTimelineSchemaCreator");
TimelineSchemaCreator timelineSchemaCreator = new TimelineSchemaCreator(); TimelineSchemaCreator timelineSchemaCreator = new TimelineSchemaCreator();
Assert.assertEquals(0, timelineSchemaCreator assertEquals(0, timelineSchemaCreator
.createTimelineSchema(new String[]{}, conf)); .createTimelineSchema(new String[]{}, conf));
} }
} }
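
One further JUnit 5 relaxation is visible in this last file: Jupiter discovers tests reflectively, so the public modifier can be dropped from test methods (and classes), which is why testTimelineSchemaCreation loses it above. A hypothetical minimal sketch, with SchemaCreatorSketchTest and createSchema standing in for the real TimelineSchemaCreator API:

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;

// Package-private class and method are sufficient for JUnit Jupiter discovery.
class SchemaCreatorSketchTest {

  // Stand-in for a schema-creation call that returns an exit code.
  private int createSchema(String[] args) {
    return 0;
  }

  @Test
  void testSchemaCreationReturnsZero() {
    // JUnit 4: Assert.assertEquals(0, ...); JUnit 5 uses the static import.
    assertEquals(0, createSchema(new String[]{}));
  }
}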