HADOOP-15187. Remove ADL mock test dependency on REST call invoked from Java SDK.

Contributed by Vishwajeet Dusane.

(cherry picked from commit bd5ab5912564d2d687651b01f552b8e4ca8c145a)
This commit is contained in:
Steve Loughran 2018-02-12 15:13:00 +00:00
parent d02e42cee4
commit 8cf88fcd1f
10 changed files with 0 additions and 1461 deletions

View File

@ -1,102 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.adl;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;
import org.apache.hadoop.fs.adl.oauth2.AzureADTokenProvider;
import static org.apache.hadoop.fs.adl.AdlConfKeys
.AZURE_AD_TOKEN_PROVIDER_CLASS_KEY;
import static org.apache.hadoop.fs.adl.AdlConfKeys
.AZURE_AD_TOKEN_PROVIDER_TYPE_KEY;
import com.squareup.okhttp.mockwebserver.MockWebServer;
import org.junit.After;
import org.junit.Before;
/**
 * Base class that stands up an in-process mock of the ADLS REST endpoint so
 * derived tests can exercise the ADL client without a live backend. Subclasses
 * enqueue or dispatch the server responses they expect. Token generation ahead
 * of each request is simulated via
 * {@link org.apache.hadoop.fs.adl.common.CustomMockTokenProvider}.
 */
public class AdlMockWebServer {

  // MockWebServer instances are lean; a fresh one is created for every test.
  private MockWebServer backend = null;
  private TestableAdlFileSystem adlFs = null;
  private int boundPort = 0;
  private Configuration configuration = new Configuration();

  public MockWebServer getMockServer() {
    return backend;
  }

  public TestableAdlFileSystem getMockAdlFileSystem() {
    return adlFs;
  }

  public int getPort() {
    return boundPort;
  }

  public Configuration getConf() {
    return configuration;
  }

  public void setConf(Configuration conf) {
    this.configuration = conf;
  }

  @Before
  public void preTestSetup() throws IOException, URISyntaxException {
    // Boot the mock HTTP endpoint and discover which local port it bound.
    backend = new MockWebServer();
    backend.start();
    URL baseUrl = backend.getUrl("");
    boundPort = baseUrl.getPort();

    // Wire a testable file system to the mock endpoint, authenticating
    // through the mock token provider. Enqueued responses are served in the
    // same order that requests arrive.
    adlFs = new TestableAdlFileSystem();
    configuration.setEnum(AZURE_AD_TOKEN_PROVIDER_TYPE_KEY,
        TokenProviderType.Custom);
    configuration.setClass(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
        CustomMockTokenProvider.class, AzureADTokenProvider.class);
    URI uri = new URI("adl://localhost:" + boundPort);
    adlFs.initialize(uri, configuration);
  }

  @After
  public void postTestSetup() throws IOException {
    adlFs.close();
    backend.shutdown();
  }
}

View File

@ -1,262 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.AccessControlException;
import com.squareup.okhttp.mockwebserver.MockResponse;
import org.junit.Assert;
import org.junit.Test;
/**
 * Stubs the ADLS server and verifies ACL data conversion between the SDK and
 * the Hadoop ADL client, including the error path where the backend denies
 * access with an HTTP 403.
 */
public class TestACLFeatures extends AdlMockWebServer {

  /** Builds the standard two-entry ACL (access user + default group). */
  private static List<AclEntry> buildStandardAclEntries() {
    List<AclEntry> entries = new ArrayList<AclEntry>();
    AclEntry.Builder builder = new AclEntry.Builder();
    builder.setName("hadoop");
    builder.setType(AclEntryType.USER);
    builder.setPermission(FsAction.ALL);
    builder.setScope(AclEntryScope.ACCESS);
    entries.add(builder.build());
    builder.setName("hdfs");
    builder.setType(AclEntryType.GROUP);
    builder.setPermission(FsAction.READ_WRITE);
    builder.setScope(AclEntryScope.DEFAULT);
    entries.add(builder.build());
    return entries;
  }

  /** Enqueues a plain HTTP 200 success response. */
  private void enqueueSuccess() {
    getMockServer().enqueue(new MockResponse().setResponseCode(200));
  }

  /** Enqueues an HTTP 403 whose body is an AccessControlException. */
  private void enqueueAccessDenied() {
    getMockServer().enqueue(new MockResponse().setResponseCode(403)
        .setBody(TestADLResponseData.getAccessControlException()));
  }

  @Test(expected = AccessControlException.class)
  public void testModifyAclEntries() throws URISyntaxException, IOException {
    List<AclEntry> entries = buildStandardAclEntries();
    // First call succeeds; second is rejected by the backend.
    enqueueSuccess();
    getMockAdlFileSystem().modifyAclEntries(new Path("/test1/test2"), entries);
    enqueueAccessDenied();
    getMockAdlFileSystem().modifyAclEntries(new Path("/test1/test2"), entries);
  }

  @Test(expected = AccessControlException.class)
  public void testRemoveAclEntriesWithOnlyUsers()
      throws URISyntaxException, IOException {
    // Deliberately minimal entry: name and type only, no permission/scope.
    List<AclEntry> entries = new ArrayList<AclEntry>();
    AclEntry.Builder builder = new AclEntry.Builder();
    builder.setName("hadoop");
    builder.setType(AclEntryType.USER);
    entries.add(builder.build());
    enqueueSuccess();
    getMockAdlFileSystem().removeAclEntries(new Path("/test1/test2"), entries);
    enqueueAccessDenied();
    getMockAdlFileSystem().removeAclEntries(new Path("/test1/test2"), entries);
  }

  @Test(expected = AccessControlException.class)
  public void testRemoveAclEntries() throws URISyntaxException, IOException {
    List<AclEntry> entries = buildStandardAclEntries();
    enqueueSuccess();
    getMockAdlFileSystem().removeAclEntries(new Path("/test1/test2"), entries);
    enqueueAccessDenied();
    getMockAdlFileSystem().removeAclEntries(new Path("/test1/test2"), entries);
  }

  @Test(expected = AccessControlException.class)
  public void testRemoveDefaultAclEntries()
      throws URISyntaxException, IOException {
    enqueueSuccess();
    getMockAdlFileSystem().removeDefaultAcl(new Path("/test1/test2"));
    enqueueAccessDenied();
    getMockAdlFileSystem().removeDefaultAcl(new Path("/test1/test2"));
  }

  @Test(expected = AccessControlException.class)
  public void testRemoveAcl() throws URISyntaxException, IOException {
    enqueueSuccess();
    getMockAdlFileSystem().removeAcl(new Path("/test1/test2"));
    enqueueAccessDenied();
    getMockAdlFileSystem().removeAcl(new Path("/test1/test2"));
  }

  @Test(expected = AccessControlException.class)
  public void testSetAcl() throws URISyntaxException, IOException {
    List<AclEntry> entries = buildStandardAclEntries();
    enqueueSuccess();
    getMockAdlFileSystem().setAcl(new Path("/test1/test2"), entries);
    enqueueAccessDenied();
    getMockAdlFileSystem().setAcl(new Path("/test1/test2"), entries);
  }

  @Test(expected = AccessControlException.class)
  public void testCheckAccess() throws URISyntaxException, IOException {
    // Probe every FsAction with a successful response first...
    FsAction[] probes = {FsAction.ALL, FsAction.EXECUTE, FsAction.READ,
        FsAction.READ_EXECUTE, FsAction.READ_WRITE, FsAction.NONE,
        FsAction.WRITE, FsAction.WRITE_EXECUTE};
    for (FsAction action : probes) {
      enqueueSuccess();
      getMockAdlFileSystem().access(new Path("/test1/test2"), action);
    }
    // ...then verify a 403 surfaces as AccessControlException.
    enqueueAccessDenied();
    getMockAdlFileSystem()
        .access(new Path("/test1/test2"), FsAction.WRITE_EXECUTE);
  }

  @Test(expected = AccessControlException.class)
  public void testSetPermission() throws URISyntaxException, IOException {
    enqueueSuccess();
    getMockAdlFileSystem()
        .setPermission(new Path("/test1/test2"), FsPermission.getDefault());
    enqueueAccessDenied();
    getMockAdlFileSystem()
        .setPermission(new Path("/test1/test2"), FsPermission.getDefault());
  }

  @Test(expected = AccessControlException.class)
  public void testSetOwner() throws URISyntaxException, IOException {
    enqueueSuccess();
    getMockAdlFileSystem().setOwner(new Path("/test1/test2"), "hadoop", "hdfs");
    enqueueAccessDenied();
    getMockAdlFileSystem().setOwner(new Path("/test1/test2"), "hadoop", "hdfs");
  }

  @Test
  public void getAclStatusAsExpected() throws URISyntaxException, IOException {
    // Canned JSON response: owner "hadoop", group "supergroup", perm 775,
    // entries user:carla:rw- and group::r-x.
    getMockServer().enqueue(new MockResponse().setResponseCode(200)
        .setBody(TestADLResponseData.getGetAclStatusJSONResponse()));
    AclStatus aclStatus = getMockAdlFileSystem()
        .getAclStatus(new Path("/test1/test2"));
    Assert.assertEquals("supergroup", aclStatus.getGroup());
    Assert.assertEquals("hadoop", aclStatus.getOwner());
    Assert.assertEquals(Short.valueOf("775", 8),
        (Short) aclStatus.getPermission().toShort());
    for (AclEntry entry : aclStatus.getEntries()) {
      boolean known = entry.toString().equalsIgnoreCase("user:carla:rw-")
          || entry.toString().equalsIgnoreCase("group::r-x");
      if (!known) {
        Assert.fail("Unexpected entry : " + entry.toString());
      }
    }
  }

  @Test(expected = FileNotFoundException.class)
  public void getAclStatusNotExists() throws URISyntaxException, IOException {
    getMockServer().enqueue(new MockResponse().setResponseCode(404)
        .setBody(TestADLResponseData.getFileNotFoundException()));
    getMockAdlFileSystem().getAclStatus(new Path("/test1/test2"));
  }

  @Test(expected = AccessControlException.class)
  public void testAclStatusDenied() throws URISyntaxException, IOException {
    enqueueAccessDenied();
    getMockAdlFileSystem().getAclStatus(new Path("/test1/test2"));
  }
}

View File

@ -1,196 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.adl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.adl.common.Parallelized;
import org.apache.hadoop.fs.adl.common.TestDataForRead;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Random;
import static org.apache.hadoop.fs.adl.AdlConfKeys.READ_AHEAD_BUFFER_SIZE_KEY;
/**
 * Stress-tests positional reads against the number of network calls required
 * to fetch the data, and verifies that data integrity and byte ordering are
 * maintained throughout. Runs each fixture in parallel via
 * {@link org.apache.hadoop.fs.adl.common.Parallelized}.
 */
@RunWith(Parallelized.class)
public class TestAdlRead extends AdlMockWebServer {

  // Per-run fixture: payload bytes, expected backend call count, test
  // intensity, and whether the call count should be asserted.
  private TestDataForRead testData;

  public TestAdlRead(TestDataForRead testData) {
    // Pin the read-ahead buffer to 4 KB so the expected network-call counts
    // in the fixtures below are deterministic regardless of defaults.
    Configuration configuration = new Configuration();
    configuration.setInt(READ_AHEAD_BUFFER_SIZE_KEY, 4 * 1024);
    setConf(configuration);
    this.testData = testData;
  }

  /**
   * Fixtures, each {payload, expected network calls, intensity, check calls}.
   * Payloads range from short ASCII/UTF-8 strings to random buffers up to
   * 64 KB; larger buffers expect proportionally more backend calls.
   */
  @Parameterized.Parameters(name = "{index}")
  public static Collection testDataForReadOperation() {
    return Arrays.asList(new Object[][] {
        //--------------------------
        // Test Data
        //--------------------------
        {new TestDataForRead("Hello World".getBytes(), 2, 1000, true)},
        {new TestDataForRead(
            ("the problem you appear to be wrestling with is that this doesn't "
                + "display very well. ").getBytes(), 2, 1000, true)},
        {new TestDataForRead(("您的數據是寶貴的資產,以您的組織,並有當前和未來價值。由於這個原因,"
            + "所有的數據應存儲以供將來分析。今天,這往往是不這樣做," + "因為傳統的分析基礎架構的限制,"
            + "像模式的預定義,存儲大數據集和不同的數據筒倉的傳播的成本。"
            + "為了應對這一挑戰,數據湖面概念被引入作為一個企業級存儲庫來存儲所有"
            + "類型的在一個地方收集到的數據。對於運作和探索性分析的目的,所有類型的" + "數據可以定義需求或模式之前被存儲在數據湖。")
            .getBytes(), 2, 1000, true)}, {new TestDataForRead(
        TestADLResponseData.getRandomByteArrayData(4 * 1024), 2, 10, true)},
        {new TestDataForRead(TestADLResponseData.getRandomByteArrayData(100), 2,
            1000, true)}, {new TestDataForRead(
        TestADLResponseData.getRandomByteArrayData(1 * 1024), 2, 50, true)},
        {new TestDataForRead(
            TestADLResponseData.getRandomByteArrayData(8 * 1024), 3, 10,
            false)}, {new TestDataForRead(
        TestADLResponseData.getRandomByteArrayData(16 * 1024), 5, 10, false)},
        {new TestDataForRead(
            TestADLResponseData.getRandomByteArrayData(32 * 1024), 9, 10,
            false)}, {new TestDataForRead(
        TestADLResponseData.getRandomByteArrayData(64 * 1024), 17, 10,
        false)}});
  }

  /**
   * Sequentially reads the whole stream and checks the content and, when the
   * fixture requests it, the number of backend calls made.
   */
  @Test
  public void testEntireBytes() throws IOException, InterruptedException {
    getMockServer().setDispatcher(testData.getDispatcher());
    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
    byte[] expectedData = new byte[testData.getActualData().length];
    int n = 0;
    int len = expectedData.length;
    int off = 0;
    // Loop until the full payload is read; read() may return short counts.
    while (n < len) {
      int count = in.read(expectedData, off + n, len - n);
      if (count < 0) {
        throw new EOFException();
      }
      n += count;
    }
    Assert.assertEquals(expectedData.length, testData.getActualData().length);
    Assert.assertArrayEquals(expectedData, testData.getActualData());
    in.close();
    if (testData.isCheckOfNoOfCalls()) {
      Assert.assertEquals(testData.getExpectedNoNetworkCall(),
          getMockServer().getRequestCount());
    }
  }

  /**
   * Seeks to 1000 random positions and verifies position tracking and the
   * byte read at each position.
   */
  @Test
  public void testSeekOperation() throws IOException, InterruptedException {
    getMockServer().setDispatcher(testData.getDispatcher());
    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
    Random random = new Random();
    for (int i = 0; i < 1000; ++i) {
      int position = random.nextInt(testData.getActualData().length);
      in.seek(position);
      Assert.assertEquals(position, in.getPos());
      // Mask to an unsigned byte since read() returns 0-255.
      Assert.assertEquals(testData.getActualData()[position] & 0xFF, in.read());
    }
    in.close();
    if (testData.isCheckOfNoOfCalls()) {
      Assert.assertEquals(testData.getExpectedNoNetworkCall(),
          getMockServer().getRequestCount());
    }
  }

  /**
   * Reads the whole file via readFully and always asserts the exact number
   * of backend calls, regardless of the fixture's check flag.
   */
  @Test
  public void testReadServerCalls() throws IOException, InterruptedException {
    getMockServer().setDispatcher(testData.getDispatcher());
    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
    byte[] expectedData = new byte[testData.getActualData().length];
    in.readFully(expectedData);
    Assert.assertArrayEquals(expectedData, testData.getActualData());
    Assert.assertEquals(testData.getExpectedNoNetworkCall(),
        getMockServer().getRequestCount());
    in.close();
  }

  /**
   * Exercises all three readFully overloads (sequential, positional, and
   * positional with offset/length) against the same payload.
   */
  @Test
  public void testReadFully() throws IOException, InterruptedException {
    getMockServer().setDispatcher(testData.getDispatcher());
    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
    byte[] expectedData = new byte[testData.getActualData().length];
    in.readFully(expectedData);
    Assert.assertArrayEquals(expectedData, testData.getActualData());
    in.readFully(0, expectedData);
    Assert.assertArrayEquals(expectedData, testData.getActualData());
    in.readFully(0, expectedData, 0, expectedData.length);
    Assert.assertArrayEquals(expectedData, testData.getActualData());
    in.close();
  }

  /**
   * Random positional readFully calls compared against the same slice of an
   * in-memory copy of the payload. First pass reads to end-of-data from a
   * random offset; second pass reads a random length from a random offset.
   */
  @Test
  public void testRandomPositionalReadUsingReadFully()
      throws IOException, InterruptedException {
    getMockServer().setDispatcher(testData.getDispatcher());
    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
    ByteArrayInputStream actualData = new ByteArrayInputStream(
        testData.getActualData());
    Random random = new Random();
    for (int i = 0; i < testData.getIntensityOfTest(); ++i) {
      int offset = random.nextInt(testData.getActualData().length);
      int length = testData.getActualData().length - offset;
      byte[] expectedData = new byte[length];
      byte[] actualDataSubset = new byte[length];
      actualData.reset();
      actualData.skip(offset);
      actualData.read(actualDataSubset, 0, length);
      in.readFully(offset, expectedData, 0, length);
      Assert.assertArrayEquals(expectedData, actualDataSubset);
    }
    for (int i = 0; i < testData.getIntensityOfTest(); ++i) {
      int offset = random.nextInt(testData.getActualData().length);
      // length may legitimately be 0 here; readFully of 0 bytes is a no-op.
      int length = random.nextInt(testData.getActualData().length - offset);
      byte[] expectedData = new byte[length];
      byte[] actualDataSubset = new byte[length];
      actualData.reset();
      actualData.skip(offset);
      actualData.read(actualDataSubset, 0, length);
      in.readFully(offset, expectedData, 0, length);
      Assert.assertArrayEquals(expectedData, actualDataSubset);
    }
    in.close();
  }
}

View File

@ -1,299 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.adl;
import com.squareup.okhttp.mockwebserver.Dispatcher;
import com.squareup.okhttp.mockwebserver.MockResponse;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import okio.Buffer;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Tests multiple threads reading the same file or multiple files from
 * arbitrary offsets, either through one shared stream or one stream per
 * thread, against the mock ADLS backend.
 */
@RunWith(Parameterized.class)
public class TestConcurrentDataReadOperations extends AdlMockWebServer {
  private static final Logger LOG = LoggerFactory
      .getLogger(TestConcurrentDataReadOperations.class);
  // Guards lazy creation of the stream shared across reader threads.
  private static final Object LOCK = new Object();
  private static FSDataInputStream commonHandle = null;
  // Number of reader threads for this parameterized run.
  private int concurrencyLevel;

  public TestConcurrentDataReadOperations(int concurrencyLevel) {
    this.concurrencyLevel = concurrencyLevel;
  }

  @Parameterized.Parameters(name = "{index}")
  public static Collection<?> testDataNumberOfConcurrentRun() {
    return Arrays.asList(new Object[][] {{1}, {2}, {3}, {4}, {5}});
  }

  /** Returns {@code size} random bytes. */
  public static byte[] getRandomByteArrayData(int size) {
    byte[] b = new byte[size];
    Random rand = new Random();
    rand.nextBytes(b);
    return b;
  }

  /**
   * Installs a dispatcher that answers GETFILESTATUS and OPEN requests for
   * the given test files, honoring the offset/length query parameters of
   * OPEN. Unknown paths and operations get an HTTP 501.
   */
  private void setDispatcher(final ArrayList<CreateTestData> testData) {
    getMockServer().setDispatcher(new Dispatcher() {
      @Override
      public MockResponse dispatch(RecordedRequest recordedRequest)
          throws InterruptedException {
        CreateTestData currentRequest = null;
        for (CreateTestData local : testData) {
          if (recordedRequest.getPath().contains(local.path.toString())) {
            currentRequest = local;
            break;
          }
        }
        if (currentRequest == null) {
          // Bug fix: the original built this response but never returned it,
          // so an unknown path fell through and NPE'd on currentRequest.data.
          return new MockResponse().setBody("Request data not found")
              .setResponseCode(501);
        }
        if (recordedRequest.getRequestLine().contains("op=GETFILESTATUS")) {
          return new MockResponse().setResponseCode(200).setBody(
              TestADLResponseData
                  .getGetFileStatusJSONResponse(currentRequest.data.length));
        }
        if (recordedRequest.getRequestLine().contains("op=OPEN")) {
          // Parse offset/length from the request line; default both to 0.
          String request = recordedRequest.getRequestLine();
          int offset = 0;
          int byteCount = 0;
          Pattern pattern = Pattern.compile("offset=([0-9]+)");
          Matcher matcher = pattern.matcher(request);
          if (matcher.find()) {
            LOG.debug(matcher.group(1));
            offset = Integer.parseInt(matcher.group(1));
          }
          pattern = Pattern.compile("length=([0-9]+)");
          matcher = pattern.matcher(request);
          if (matcher.find()) {
            LOG.debug(matcher.group(1));
            byteCount = Integer.parseInt(matcher.group(1));
          }
          // Serve at most the remaining bytes from the requested offset.
          Buffer buf = new Buffer();
          buf.write(currentRequest.data, offset,
              Math.min(currentRequest.data.length - offset, byteCount));
          return new MockResponse().setResponseCode(200)
              .setChunkedBody(buf, 4 * 1024 * 1024);
        }
        return new MockResponse().setBody("NOT SUPPORTED").setResponseCode(501);
      }
    });
  }

  @Before
  public void resetHandle() {
    // The shared stream is static; reset it so runs stay independent.
    commonHandle = null;
  }

  /**
   * Each thread opens its own stream over its own file and reads it fully.
   */
  @Test
  public void testParallelReadOnDifferentStreams()
      throws IOException, InterruptedException, ExecutionException {
    ArrayList<CreateTestData> createTestData = new ArrayList<CreateTestData>();
    Random random = new Random();
    for (int i = 0; i < concurrencyLevel; i++) {
      CreateTestData testData = new CreateTestData();
      testData
          .set(new Path("/test/concurrentRead/" + UUID.randomUUID().toString()),
              getRandomByteArrayData(random.nextInt(1 * 1024 * 1024)));
      createTestData.add(testData);
    }
    setDispatcher(createTestData);
    ArrayList<ReadTestData> readTestData = new ArrayList<ReadTestData>();
    for (CreateTestData local : createTestData) {
      ReadTestData localReadData = new ReadTestData();
      localReadData.set(local.path, local.data, 0);
      readTestData.add(localReadData);
    }
    runReadTest(readTestData, false);
  }

  /**
   * All threads read through a single shared stream: first a full read to
   * warm the handle, then concurrencyLevel*5 random positional reads.
   */
  @Test
  public void testParallelReadOnSameStreams()
      throws IOException, InterruptedException, ExecutionException {
    ArrayList<CreateTestData> createTestData = new ArrayList<CreateTestData>();
    Random random = new Random();
    for (int i = 0; i < 1; i++) {
      CreateTestData testData = new CreateTestData();
      testData
          .set(new Path("/test/concurrentRead/" + UUID.randomUUID().toString()),
              getRandomByteArrayData(1024 * 1024));
      createTestData.add(testData);
    }
    setDispatcher(createTestData);
    ArrayList<ReadTestData> readTestData = new ArrayList<ReadTestData>();
    ByteArrayInputStream buffered = new ByteArrayInputStream(
        createTestData.get(0).data);
    ReadTestData readInitially = new ReadTestData();
    byte[] initialData = new byte[1024 * 1024];
    // ByteArrayInputStream.read fills the full buffer in one call here.
    buffered.read(initialData);
    readInitially.set(createTestData.get(0).path, initialData, 0);
    readTestData.add(readInitially);
    runReadTest(readTestData, false);
    readTestData.clear();
    for (int i = 0; i < concurrencyLevel * 5; i++) {
      ReadTestData localReadData = new ReadTestData();
      int offset = random.nextInt((1024 * 1024) - 1);
      int length = 1024 * 1024 - offset;
      byte[] expectedData = new byte[length];
      buffered.reset();
      buffered.skip(offset);
      buffered.read(expectedData);
      localReadData.set(createTestData.get(0).path, expectedData, offset);
      readTestData.add(localReadData);
    }
    runReadTest(readTestData, true);
  }

  /**
   * Runs one reader task per ReadTestData on a fixed pool and asserts every
   * task both finished in time and verified its data.
   */
  void runReadTest(ArrayList<ReadTestData> testData, boolean useSameStream)
      throws InterruptedException, ExecutionException {
    ExecutorService executor = Executors.newFixedThreadPool(testData.size());
    ArrayList<Future<Boolean>> subtasks =
        new ArrayList<Future<Boolean>>(testData.size());
    for (ReadTestData readData : testData) {
      subtasks.add(executor.submit(
          new ReadConcurrentRunnable(readData.data, readData.path,
              readData.offset, useSameStream)));
    }
    executor.shutdown();
    // Fail loudly on a wedge instead of silently proceeding after timeout.
    Assert.assertTrue("Reader tasks did not complete within 120 seconds",
        executor.awaitTermination(120, TimeUnit.SECONDS));
    for (Future<Boolean> subtask : subtasks) {
      Assert.assertTrue(subtask.get());
    }
  }

  /** Expected bytes to be read from {@code path} starting at {@code offset}. */
  class ReadTestData {
    private Path path;
    private byte[] data;
    private int offset;

    public void set(Path filePath, byte[] dataToBeRead, int fromOffset) {
      this.path = filePath;
      this.data = dataToBeRead;
      this.offset = fromOffset;
    }
  }

  /** Backing bytes the dispatcher serves for {@code path}. */
  class CreateTestData {
    private Path path;
    private byte[] data;

    public void set(Path filePath, byte[] dataToBeWritten) {
      this.path = filePath;
      this.data = dataToBeWritten;
    }
  }

  /**
   * Reads {@code expectedData.length} bytes at {@code offset} from the file
   * (via a private or the shared stream) and compares them to the expected
   * bytes. Returns false on IOException instead of propagating it.
   */
  class ReadConcurrentRunnable implements Callable<Boolean> {
    private Path path;
    private int offset;
    private byte[] expectedData;
    private boolean useSameStream;

    public ReadConcurrentRunnable(byte[] expectedData, Path path, int offset,
        boolean useSameStream) {
      this.path = path;
      this.offset = offset;
      this.expectedData = expectedData;
      this.useSameStream = useSameStream;
    }

    public Boolean call() throws IOException {
      try {
        FSDataInputStream in;
        if (useSameStream) {
          // Double-checked under LOCK: only the first thread opens the
          // shared handle; the rest reuse it.
          synchronized (LOCK) {
            if (commonHandle == null) {
              commonHandle = getMockAdlFileSystem().open(path);
            }
            in = commonHandle;
          }
        } else {
          in = getMockAdlFileSystem().open(path);
        }
        byte[] actualData = new byte[expectedData.length];
        in.readFully(offset, actualData);
        Assert.assertArrayEquals("Path :" + path.toString() + " did not match.",
            expectedData, actualData);
        if (!useSameStream) {
          in.close();
        }
      } catch (IOException e) {
        // Log with the failing path and signal failure to runReadTest.
        LOG.error("Concurrent read failed for path {}", path, e);
        return false;
      }
      return true;
    }
  }
}

View File

@ -1,140 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.adl;
import com.squareup.okhttp.mockwebserver.MockResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collection;
import static org.apache.hadoop.fs.adl.AdlConfKeys.ADL_BLOCK_SIZE;
import static org.apache.hadoop.fs.adl.AdlConfKeys
.AZURE_AD_TOKEN_PROVIDER_CLASS_KEY;
import static org.apache.hadoop.fs.adl.AdlConfKeys
.AZURE_AD_TOKEN_PROVIDER_TYPE_KEY;
/**
 * Test access token provider behaviour with custom token provider and for
 * token provider cache is enabled.
 */
@RunWith(Parameterized.class)
public class TestCustomTokenProvider extends AdlMockWebServer {
  private static final long TEN_MINUTES_IN_MILIS = 600000;

  /** Number of backend calls to issue per FileSystem instance. */
  private final int backendCallCount;
  /** Expected total token-acquisition callbacks once the test finishes. */
  private final int expectedCallbackToAccessToken;
  /** FileSystem instances initialised against the mock server in init(). */
  private TestableAdlFileSystem[] fileSystems;
  // Bounded wildcard instead of a raw Class: erasure-identical, so existing
  // parameter rows (e.g. CustomMockTokenProvider.class) still work unchanged.
  private final Class<?> typeOfTokenProviderClass;
  /** Token expiry offset in millis, applied relative to "now". */
  private final long expiryFromNow;
  /** Number of FileSystem objects to create for this parameter row. */
  private final int fsObjectCount;

  /**
   * Invoked by the JUnit {@link Parameterized} runner with one row of
   * {@link #testDataForTokenProvider()}.
   *
   * @param typeOfTokenProviderClass token provider implementation to load
   * @param expiryFromNow token expiry time in millis relative to now
   * @param fsObjectCount number of FileSystem objects to create
   * @param backendCallCount backend calls per FileSystem object
   * @param expectedCallbackToAccessToken expected token callbacks in total
   */
  public TestCustomTokenProvider(Class<?> typeOfTokenProviderClass,
      long expiryFromNow, int fsObjectCount, int backendCallCount,
      int expectedCallbackToAccessToken)
      throws IllegalAccessException, InstantiationException, URISyntaxException,
      IOException {
    this.typeOfTokenProviderClass = typeOfTokenProviderClass;
    this.expiryFromNow = expiryFromNow;
    this.fsObjectCount = fsObjectCount;
    this.backendCallCount = backendCallCount;
    this.expectedCallbackToAccessToken = expectedCallbackToAccessToken;
  }

  @Parameterized.Parameters(name = "{index}")
  public static Collection<Object[]> testDataForTokenProvider() {
    return Arrays.asList(new Object[][] {
        // Data set in order
        // INPUT - CustomTokenProvider class to load
        // INPUT - expiry time in milis. Subtract from current time
        // INPUT - No. of FileSystem object
        // INPUT - No. of backend calls per FileSystem object
        // EXPECTED - Number of callbacks to get token after test finished.
        {CustomMockTokenProvider.class, 0, 1, 1, 1},
        {CustomMockTokenProvider.class, TEN_MINUTES_IN_MILIS, 1, 1, 1},
        {CustomMockTokenProvider.class, TEN_MINUTES_IN_MILIS, 2, 1, 2},
        {CustomMockTokenProvider.class, TEN_MINUTES_IN_MILIS, 10, 10, 10}});
  }

  /**
   * Explicitly invoked init so that base class mock server is setup before
   * test data initialization is done.
   *
   * @throws IOException
   * @throws URISyntaxException
   */
  public void init() throws IOException, URISyntaxException {
    Configuration configuration = new Configuration();
    configuration.setEnum(AZURE_AD_TOKEN_PROVIDER_TYPE_KEY,
        TokenProviderType.Custom);
    configuration.set(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
        typeOfTokenProviderClass.getName());

    fileSystems = new TestableAdlFileSystem[fsObjectCount];
    URI uri = new URI("adl://localhost:" + getPort());
    for (int i = 0; i < fsObjectCount; ++i) {
      fileSystems[i] = new TestableAdlFileSystem();
      fileSystems[i].initialize(uri, configuration);
      // Push the configured expiry into the mock provider so the cache
      // behaviour under test is deterministic.
      ((CustomMockTokenProvider) fileSystems[i].getAzureTokenProvider())
          .setExpiryTimeInMillisAfter(expiryFromNow);
    }
  }

  /**
   * Issues backendCallCount getFileStatus calls on each FileSystem and then
   * verifies the total number of token-acquisition callbacks matches the
   * expectation for this parameter row.
   */
  @Test
  public void testCustomTokenManagement()
      throws IOException, URISyntaxException {
    int accessTokenCallbackDuringExec = 0;
    init();
    for (TestableAdlFileSystem tfs : fileSystems) {
      for (int i = 0; i < backendCallCount; ++i) {
        getMockServer().enqueue(new MockResponse().setResponseCode(200)
            .setBody(TestADLResponseData.getGetFileStatusJSONResponse()));
        FileStatus fileStatus = tfs.getFileStatus(new Path("/test1/test2"));
        Assert.assertTrue(fileStatus.isFile());
        Assert.assertEquals("adl://" + getMockServer().getHostName() + ":" +
                getMockServer().getPort() + "/test1/test2",
            fileStatus.getPath().toString());
        Assert.assertEquals(4194304, fileStatus.getLen());
        Assert.assertEquals(ADL_BLOCK_SIZE, fileStatus.getBlockSize());
        Assert.assertEquals(1, fileStatus.getReplication());
        Assert
            .assertEquals(new FsPermission("777"), fileStatus.getPermission());
        Assert.assertEquals("NotSupportYet", fileStatus.getOwner());
        Assert.assertEquals("NotSupportYet", fileStatus.getGroup());
      }

      accessTokenCallbackDuringExec += ((CustomMockTokenProvider) tfs
          .getAzureTokenProvider()).getAccessTokenRequestCount();
    }

    Assert.assertEquals(expectedCallbackToAccessToken,
        accessTokenCallbackDuringExec);
  }
}

View File

@ -1,102 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl;
import com.squareup.okhttp.mockwebserver.MockResponse;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URISyntaxException;
import static org.apache.hadoop.fs.adl.AdlConfKeys.ADL_BLOCK_SIZE;
/**
 * Verifies the local getFileStatus implementation: correct parsing of both
 * successful and ACL-flagged JSON responses from the mock server.
 * Full Adls GetFileStatus coverage lives in the
 * org.apache.hadoop.fs.adl.live testing package.
 */
public class TestGetFileStatus extends AdlMockWebServer {
  private static final Logger LOG =
      LoggerFactory.getLogger(TestGetFileStatus.class);

  @Test
  public void getFileStatusReturnsAsExpected()
      throws URISyntaxException, IOException {
    getMockServer().enqueue(new MockResponse().setResponseCode(200)
        .setBody(TestADLResponseData.getGetFileStatusJSONResponse()));

    Path target = new Path("/test1/test2");
    long begin = Time.monotonicNow();
    FileStatus status = getMockAdlFileSystem().getFileStatus(target);
    long finish = Time.monotonicNow();
    LOG.debug("Time : " + (finish - begin));

    // Path reported back must be rewritten onto the mock server endpoint.
    String expectedPath = "adl://" + getMockServer().getHostName() + ":"
        + getMockServer().getPort() + "/test1/test2";
    Assert.assertTrue(status.isFile());
    Assert.assertEquals(expectedPath, status.getPath().toString());
    Assert.assertEquals(4194304, status.getLen());
    Assert.assertEquals(ADL_BLOCK_SIZE, status.getBlockSize());
    Assert.assertEquals(1, status.getReplication());
    Assert.assertEquals(new FsPermission("777"), status.getPermission());
    Assert.assertEquals("NotSupportYet", status.getOwner());
    Assert.assertEquals("NotSupportYet", status.getGroup());
    Assert.assertTrue(target + " should have Acl!", status.hasAcl());
    Assert.assertFalse(target + " should not be encrypted!",
        status.isEncrypted());
    Assert.assertFalse(target + " should not be erasure coded!",
        status.isErasureCoded());
  }

  @Test
  public void getFileStatusAclBit() throws URISyntaxException, IOException {
    // Exercise both ACLBIT=true and ACLBIT=false responses; the returned
    // FileStatus must mirror the flag in hasAcl() and in the permission.
    for (boolean aclBit : new boolean[] {true, false}) {
      getMockServer().enqueue(new MockResponse().setResponseCode(200)
          .setBody(TestADLResponseData.getGetFileStatusJSONResponse(aclBit)));
      long begin = Time.monotonicNow();
      FileStatus status = getMockAdlFileSystem()
          .getFileStatus(new Path("/test1/test2"));
      LOG.debug("Time : " + (Time.monotonicNow() - begin));
      Assert.assertTrue(status.isFile());
      Assert.assertEquals(aclBit, status.hasAcl());
      Assert.assertEquals(aclBit, status.getPermission().getAclBit());
    }
  }
}

View File

@ -1,137 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl;
import com.squareup.okhttp.mockwebserver.MockResponse;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URISyntaxException;
/**
 * This class is responsible for testing local listStatus implementation to
 * cover correct parsing of successful and error JSON response from the server.
 * Adls ListStatus functionality is in detail covered in
 * org.apache.hadoop.fs.adl.live testing package.
 */
public class TestListStatus extends AdlMockWebServer {
  private static final Logger LOG = LoggerFactory
      .getLogger(TestListStatus.class);

  /**
   * Listing sizes cover small (10), medium (200) and paging-sized (2048)
   * directory results.
   */
  @Test
  public void listStatusReturnsAsExpected() throws IOException {
    getMockServer().enqueue(new MockResponse().setResponseCode(200)
        .setBody(TestADLResponseData.getListFileStatusJSONResponse(10)));
    long startTime = Time.monotonicNow();
    FileStatus[] ls = getMockAdlFileSystem()
        .listStatus(new Path("/test1/test2"));
    long endTime = Time.monotonicNow();
    LOG.debug("Time : " + (endTime - startTime));
    Assert.assertEquals(10, ls.length);

    getMockServer().enqueue(new MockResponse().setResponseCode(200)
        .setBody(TestADLResponseData.getListFileStatusJSONResponse(200)));
    startTime = Time.monotonicNow();
    ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
    endTime = Time.monotonicNow();
    LOG.debug("Time : " + (endTime - startTime));
    Assert.assertEquals(200, ls.length);

    getMockServer().enqueue(new MockResponse().setResponseCode(200)
        .setBody(TestADLResponseData.getListFileStatusJSONResponse(2048)));
    startTime = Time.monotonicNow();
    ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
    endTime = Time.monotonicNow();
    LOG.debug("Time : " + (endTime - startTime));
    Assert.assertEquals(2048, ls.length);
  }

  /**
   * Error responses (403, 500) must surface to the caller as IOExceptions
   * carrying the server-provided message.
   */
  @Test
  public void listStatusOnFailure() throws IOException {
    getMockServer().enqueue(new MockResponse().setResponseCode(403).setBody(
        TestADLResponseData.getErrorIllegalArgumentExceptionJSONResponse()));
    long startTime = Time.monotonicNow();
    try {
      getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
      // Without this fail() the test silently passed if no exception was
      // raised at all.
      Assert.fail("listStatus should have thrown for the 403 response");
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("Invalid"));
    }
    long endTime = Time.monotonicNow();
    LOG.debug("Time : " + (endTime - startTime));

    // SDK may increase number of retry attempts before error is propagated
    // to caller. Adding max 10 error responses in the queue to align with SDK.
    for (int i = 0; i < 10; ++i) {
      getMockServer().enqueue(new MockResponse().setResponseCode(500).setBody(
          TestADLResponseData.getErrorInternalServerExceptionJSONResponse()));
    }
    startTime = Time.monotonicNow();
    try {
      getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
      Assert.fail("listStatus should have thrown for the 500 responses");
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("Internal Server Error"));
    }
    endTime = Time.monotonicNow();
    LOG.debug("Time : " + (endTime - startTime));
  }

  /**
   * Every entry of a listing must mirror the ACLBIT flag of the server
   * response in both hasAcl() and the permission.
   */
  @Test
  public void listStatusAcl()
      throws URISyntaxException, IOException {
    // With ACLBIT set to true
    getMockServer().enqueue(new MockResponse().setResponseCode(200)
        .setBody(TestADLResponseData.getListFileStatusJSONResponse(true)));
    long startTime = Time.monotonicNow();
    FileStatus[] ls = getMockAdlFileSystem()
        .listStatus(new Path("/test1/test2"));
    long endTime = Time.monotonicNow();
    LOG.debug("Time : " + (endTime - startTime));
    for (int i = 0; i < ls.length; i++) {
      Assert.assertTrue(ls[i].isDirectory());
      Assert.assertTrue(ls[i].hasAcl());
      Assert.assertTrue(ls[i].getPermission().getAclBit());
    }

    // With ACLBIT set to false
    getMockServer().enqueue(new MockResponse().setResponseCode(200)
        .setBody(TestADLResponseData.getListFileStatusJSONResponse(false)));
    startTime = Time.monotonicNow();
    ls = getMockAdlFileSystem()
        .listStatus(new Path("/test1/test2"));
    endTime = Time.monotonicNow();
    LOG.debug("Time : " + (endTime - startTime));
    for (int i = 0; i < ls.length; i++) {
      Assert.assertTrue(ls[i].isDirectory());
      Assert.assertFalse(ls[i].hasAcl());
      Assert.assertFalse(ls[i].getPermission().getAclBit());
    }
  }
}

View File

@ -1,30 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl;
/**
 * Mock adl file storage subclass to mock adl storage on local http service.
 */
public class TestableAdlFileSystem extends AdlFileSystem {
  /** Plain HTTP is used to reach the local mock server instead of HTTPS. */
  private static final String MOCK_TRANSPORT_SCHEME = "http";

  @Override
  protected String getTransportScheme() {
    return MOCK_TRANSPORT_SCHEME;
  }
}

View File

@ -1,71 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.common;
import com.squareup.okhttp.mockwebserver.MockResponse;
import java.util.ArrayList;
/**
 * Holds an expected {@link MockResponse} together with the request
 * parameters (query params, body size, HTTP method) that test methods
 * validate against the recorded request. Mutators are package-private and
 * chainable so expectations can be built fluently.
 */
public class ExpectedResponse {
  private MockResponse response;
  private ArrayList<String> expectedQueryParameters = new ArrayList<String>();
  private int expectedBodySize;
  private String httpRequestType;

  // ---- accessors used by assertions -------------------------------------

  public MockResponse getResponse() {
    return response;
  }

  public ArrayList<String> getExpectedQueryParameters() {
    return expectedQueryParameters;
  }

  public int getExpectedBodySize() {
    return expectedBodySize;
  }

  public String getHttpRequestType() {
    return httpRequestType;
  }

  // ---- chainable builders ------------------------------------------------

  ExpectedResponse set(MockResponse mockResponse) {
    response = mockResponse;
    return this;
  }

  ExpectedResponse addExpectedQueryParam(String param) {
    expectedQueryParameters.add(param);
    return this;
  }

  ExpectedResponse addExpectedBodySize(int bodySize) {
    expectedBodySize = bodySize;
    return this;
  }

  ExpectedResponse addExpectedHttpRequestType(String expectedHttpRequestType) {
    httpRequestType = expectedHttpRequestType;
    return this;
  }
}

View File

@ -1,122 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.common;
import com.squareup.okhttp.mockwebserver.Dispatcher;
import com.squareup.okhttp.mockwebserver.MockResponse;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import okio.Buffer;
import org.apache.hadoop.fs.adl.TestADLResponseData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Supporting class for mock test to validate Adls read operation.
 * Bundles the raw file bytes with a MockWebServer {@link Dispatcher} that
 * serves GETFILESTATUS and ranged OPEN requests over those bytes.
 */
public class TestDataForRead {
  private static final Logger LOG = LoggerFactory
      .getLogger(TestDataForRead.class);

  // Raw file content that OPEN responses read byte ranges from.
  private byte[] actualData;
  // Expected responses recorded for later validation by the driving test.
  private ArrayList<ExpectedResponse> responses;
  // Answers GETFILESTATUS and OPEN requests against actualData.
  private Dispatcher dispatcher;
  // Repetition count the driving test should apply with this data set.
  private int intensityOfTest;
  // Whether the driving test should verify the number of backend calls.
  private boolean checkOfNoOfCalls;
  // Expected backend call count when checkOfNoOfCalls is enabled.
  private int expectedNoNetworkCall;

  /**
   * Builds the test data and a dispatcher serving {@code actualData}.
   *
   * @param actualData file bytes served back for OPEN range requests
   * @param expectedNoNetworkCall expected number of backend calls
   * @param intensityOfTest repetition count for the driving test
   * @param checkOfNoOfCalls whether to validate the backend call count
   */
  public TestDataForRead(final byte[] actualData, int expectedNoNetworkCall,
      int intensityOfTest, boolean checkOfNoOfCalls) {
    this.checkOfNoOfCalls = checkOfNoOfCalls;
    this.actualData = actualData;
    responses = new ArrayList<ExpectedResponse>();
    this.expectedNoNetworkCall = expectedNoNetworkCall;
    this.intensityOfTest = intensityOfTest;

    dispatcher = new Dispatcher() {
      @Override
      public MockResponse dispatch(RecordedRequest recordedRequest)
          throws InterruptedException {
        // GETFILESTATUS: report a file whose length matches actualData.
        if (recordedRequest.getRequestLine().contains("op=GETFILESTATUS")) {
          return new MockResponse().setResponseCode(200).setBody(
              TestADLResponseData
                  .getGetFileStatusJSONResponse(actualData.length));
        }

        // OPEN: extract offset/length query parameters from the request
        // line and serve exactly that byte range of actualData.
        if (recordedRequest.getRequestLine().contains("op=OPEN")) {
          String request = recordedRequest.getRequestLine();
          int offset = 0;
          int byteCount = 0;

          Pattern pattern = Pattern.compile("offset=([0-9]+)");
          Matcher matcher = pattern.matcher(request);
          if (matcher.find()) {
            LOG.debug(matcher.group(1));
            offset = Integer.parseInt(matcher.group(1));
          }

          pattern = Pattern.compile("length=([0-9]+)");
          matcher = pattern.matcher(request);
          if (matcher.find()) {
            LOG.debug(matcher.group(1));
            byteCount = Integer.parseInt(matcher.group(1));
          }

          Buffer buf = new Buffer();
          // Clamp the requested length so reads near EOF stay in bounds.
          buf.write(actualData, offset,
              Math.min(actualData.length - offset, byteCount));
          // Chunked body mimics the streaming behaviour of the service.
          return new MockResponse().setResponseCode(200)
              .setChunkedBody(buf, 4 * 1024 * 1024);
        }

        // Any other operation is unexpected in these read tests.
        return new MockResponse().setBody("NOT SUPPORTED").setResponseCode(501);
      }
    };
  }

  public boolean isCheckOfNoOfCalls() {
    return checkOfNoOfCalls;
  }

  public int getExpectedNoNetworkCall() {
    return expectedNoNetworkCall;
  }

  public int getIntensityOfTest() {
    return intensityOfTest;
  }

  public byte[] getActualData() {
    return actualData;
  }

  public ArrayList<ExpectedResponse> getResponses() {
    return responses;
  }

  public Dispatcher getDispatcher() {
    return dispatcher;
  }
}