HADOOP-17100. Replace Guava Supplier with Java8+ Supplier in Hadoop. Contributed by Ahmed Hussein.

Committed by: Ayush Saxena, 2020-07-18 14:33:43 +05:30
Parent: 2ba44a73bf
Commit: 6bcb24d269
148 changed files with 152 additions and 152 deletions
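For orientation, the change is mechanical: both com.google.common.base.Supplier<T> and java.util.function.Supplier<T> declare a single get() method, so call sites only need a different import. A minimal sketch of the pattern (the class name SupplierSwapExample is hypothetical, not part of this patch):

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;

public class SupplierSwapExample {
  public static void main(String[] args) {
    AtomicBoolean flag = new AtomicBoolean(false);
    // Before this patch the type below came from com.google.common.base;
    // only the import changes, since both interfaces expose a single get() method.
    Supplier<Boolean> check = () -> flag.get();
    flag.set(true);
    System.out.println("condition met: " + check.get());
  }
}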

View File

@ -123,7 +123,7 @@
<property name="regexp" value="true"/>
<property name="illegalPkgs" value="^sun\.[^.]+"/>
<property name="illegalClasses"
value="^com\.google\.common\.base\.(Optional|Function|Predicate), ^com\.google\.common\.collect\.(ImmutableListMultimap)"/>
value="^com\.google\.common\.base\.(Optional|Function|Predicate|Supplier), ^com\.google\.common\.collect\.(ImmutableListMultimap)"/>
</module>
<module name="RedundantImport"/>
<module name="UnusedImports"/>

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.conf;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;

View File

@ -37,7 +37,7 @@
import org.junit.Assert;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.util.concurrent.Uninterruptibles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -22,7 +22,7 @@
import java.net.InetSocketAddress;
import java.security.NoSuchAlgorithmException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.http;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import java.io.File;
import java.io.IOException;
import java.net.URI;

View File

@ -98,7 +98,7 @@
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.primitives.Bytes;
import com.google.common.primitives.Ints;
import org.slf4j.Logger;

View File

@ -36,7 +36,7 @@
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Iterables;
import org.apache.commons.configuration2.SubsetConfiguration;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.metrics2.lib;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.test.GenericTestUtils;

View File

@ -34,7 +34,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

View File

@ -21,7 +21,7 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.junit.BeforeClass;
import org.junit.Test;

View File

@ -24,7 +24,7 @@
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.curator.RetryPolicy;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;

View File

@ -32,12 +32,14 @@
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Locale;
+import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.Enumeration;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
@ -61,7 +63,6 @@
import org.mockito.stubbing.Answer;
import com.google.common.base.Joiner;
-import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
/**
@ -88,7 +89,8 @@ public abstract class GenericTestUtils {
public static final String DEFAULT_TEST_DATA_PATH = "target/test/data/";
/**
-* Error string used in {@link GenericTestUtils#waitFor(Supplier, int, int)}.
+* Error string used in
+* {@link GenericTestUtils#waitFor(Supplier, long, long)}.
*/
public static final String ERROR_MISSING_ARGUMENT =
"Input supplier interface should be initailized";
@ -380,9 +382,7 @@ public static void assertExceptionContains(String expectedText,
public static void waitFor(final Supplier<Boolean> check,
final long checkEveryMillis, final long waitForMillis)
throws TimeoutException, InterruptedException {
-if (check == null) {
-  throw new NullPointerException(ERROR_MISSING_ARGUMENT);
-}
+Objects.requireNonNull(check, ERROR_MISSING_ARGUMENT);
if (waitForMillis < checkEveryMillis) {
throw new IllegalArgumentException(ERROR_INVALID_ARGUMENT);
}
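For context, a minimal sketch of the validation pattern this hunk switches to: Objects.requireNonNull throws a NullPointerException carrying the given message, matching the hand-written null check it replaces. The waitFor method below is a simplified stand-in for GenericTestUtils.waitFor(Supplier, long, long), with placeholder messages, not Hadoop's implementation.

import java.util.Objects;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;

public class WaitForSketch {

  // Simplified stand-in for GenericTestUtils.waitFor(Supplier, long, long);
  // the error messages are placeholders, not Hadoop's actual constants.
  static void waitFor(Supplier<Boolean> check, long checkEveryMillis, long waitForMillis)
      throws TimeoutException, InterruptedException {
    // Same effect as the removed hand-written check:
    // if (check == null) { throw new NullPointerException(message); }
    Objects.requireNonNull(check, "check supplier must not be null");
    if (waitForMillis < checkEveryMillis) {
      throw new IllegalArgumentException("total wait time must be >= check interval");
    }
    long deadline = System.currentTimeMillis() + waitForMillis;
    while (!Boolean.TRUE.equals(check.get())) {
      if (System.currentTimeMillis() > deadline) {
        throw new TimeoutException("timed out waiting for condition");
      }
      Thread.sleep(checkEveryMillis);
    }
  }

  public static void main(String[] args) throws Exception {
    long start = System.currentTimeMillis();
    waitFor(() -> System.currentTimeMillis() - start > 50, 10, 1000);
    System.out.println("condition became true");
  }
}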

View File

@ -23,7 +23,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.event.Level;
import static org.junit.Assert.assertEquals;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.tracing;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.htrace.core.Span;
import org.apache.htrace.core.SpanId;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.util;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
import org.junit.Assert;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.crypto.key.kms.server;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.cache.LoadingCache;
import org.apache.curator.test.TestingServer;
import org.apache.hadoop.conf.Configuration;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.client.impl;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSOutputStream;
import org.apache.hadoop.security.UserGroupInformation;

View File

@ -92,7 +92,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Helper utilities for testing HDFS Federation.

View File

@ -64,7 +64,7 @@
import org.junit.Test;
import org.mockito.Mockito;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Tests Router admin commands.

View File

@ -74,7 +74,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Tests quota behaviors in Router-based Federation.

View File

@ -54,7 +54,7 @@
import org.junit.Test;
import org.junit.rules.Timeout;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Test retry behavior of the Router RPC Client.

View File

@ -130,7 +130,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Maps;
/**

View File

@ -72,7 +72,7 @@
import org.junit.Test;
import com.google.common.base.Preconditions;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* This class tests if EnhancedByteBufferAccess works correctly.

View File

@ -20,7 +20,7 @@
import java.io.IOException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;

View File

@ -76,7 +76,7 @@
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

View File

@ -75,7 +75,7 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import org.apache.hadoop.hdfs.server.common.blockaliasmap.BlockAliasMap;

View File

@ -26,7 +26,7 @@
import java.util.ArrayList;
import java.util.concurrent.TimeoutException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -27,7 +27,7 @@
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;

View File

@ -34,7 +34,7 @@
import java.util.zip.DeflaterOutputStream;
import java.util.zip.GZIPOutputStream;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.RandomStringUtils;

View File

@ -44,7 +44,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
public class TestDataTransferKeepalive {
final Configuration conf = new HdfsConfiguration();

View File

@ -41,7 +41,7 @@
import org.apache.hadoop.util.VersionInfo;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import java.net.InetSocketAddress;
import java.security.Permission;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;

View File

@ -38,7 +38,7 @@
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import org.apache.commons.text.TextStringBuilder;
import org.apache.hadoop.fs.BlockLocation;

View File

@ -32,7 +32,7 @@
import java.util.List;
import java.util.concurrent.TimeoutException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -19,7 +19,7 @@
import static org.junit.Assert.assertTrue;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
import org.apache.hadoop.crypto.key.kms.KMSDelegationToken;
import org.apache.hadoop.crypto.key.kms.LoadBalancingKMSClientProvider;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo;

View File

@ -29,7 +29,7 @@
import java.util.HashMap;
import java.util.Map;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs;
import com.google.common.base.Preconditions;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;

View File

@ -60,7 +60,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
/**

View File

@ -58,7 +58,7 @@
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;

View File

@ -36,7 +36,7 @@
import java.util.List;
import java.util.Scanner;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataOutputStream;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import java.io.IOException;
import java.util.Arrays;

View File

@ -24,7 +24,7 @@
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import java.io.IOException;
import java.io.InputStream;

View File

@ -59,7 +59,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
/**

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.client.impl;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.client.impl.metrics.BlockReaderIoProvider;

View File

@ -28,7 +28,7 @@
import java.util.List;
import java.util.concurrent.TimeoutException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -40,7 +40,7 @@
import org.junit.Test;
import org.mockito.Mockito;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
public class TestIPCLoggerChannel {
private static final Logger LOG = LoggerFactory.getLogger(

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.qjournal.server;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

View File

@ -22,7 +22,7 @@
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_FULL_BLOCK_REPORT_LEASE_LENGTH_MS;
import com.google.common.base.Joiner;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.util.concurrent.Uninterruptibles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.blockmanagement;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import java.util.ArrayList;
import java.util.Collection;

View File

@ -40,7 +40,7 @@
import org.mockito.Mockito;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Test if we can correctly delay the deletion of blocks.

View File

@ -31,7 +31,7 @@
import java.util.ArrayList;
import java.util.concurrent.TimeoutException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;

View File

@ -46,7 +46,7 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
/**

View File

@ -45,7 +45,7 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.FakeTimer;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Maps;
import org.junit.Before;

View File

@ -37,7 +37,7 @@
import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
import org.apache.hadoop.test.GenericTestUtils;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

View File

@ -89,7 +89,7 @@
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

View File

@ -117,7 +117,7 @@
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* This tests if sync all replicas in block recovery works correctly.

View File

@ -40,7 +40,7 @@
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeoutException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.AppendTestUtil;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.datanode;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;

View File

@ -71,7 +71,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Test suite covering lifeline protocol handling in the DataNode.

View File

@ -28,7 +28,7 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -34,7 +34,7 @@
import java.util.concurrent.atomic.AtomicInteger;
import java.util.List;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import net.jcip.annotations.NotThreadSafe;

View File

@ -54,7 +54,7 @@
import org.junit.Test;
import org.junit.rules.Timeout;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Test periodic logging of DataNode metrics.

View File

@ -30,7 +30,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;

View File

@ -91,7 +91,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Fine-grain testing of block files and locations after volume failure.

View File

@ -29,7 +29,7 @@
import java.util.ArrayList;
import java.util.concurrent.ThreadLocalRandom;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -28,7 +28,7 @@
import java.net.Socket;
import java.util.concurrent.TimeUnit;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.datanode.checker;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.FakeTimer;

View File

@ -62,7 +62,7 @@
import org.junit.Test;
import org.slf4j.event.Level;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.primitives.Ints;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_FSDATASETCACHE_MAX_THREADS_PER_VOLUME_KEY;

View File

@ -91,7 +91,7 @@
import org.junit.Test;
import org.slf4j.event.Level;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.primitives.Ints;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_FSDATASETCACHE_MAX_THREADS_PER_VOLUME_KEY;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
import java.io.OutputStream;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DF;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;

View File

@ -62,7 +62,7 @@
import org.junit.Test;
import org.slf4j.event.Level;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.primitives.Ints;
/**

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.datanode.metrics;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.metrics2.lib.MetricsTestHelper;

View File

@ -51,7 +51,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Lists;
public class TestDataNodeUGIProvider {

View File

@ -17,7 +17,7 @@
package org.apache.hadoop.hdfs.server.diskbalancer;
import com.google.common.base.Preconditions;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.ReconfigurationException;

View File

@ -20,7 +20,7 @@
package org.apache.hadoop.hdfs.server.diskbalancer;
import com.google.common.base.Preconditions;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.codec.digest.DigestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -95,7 +95,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.Maps;
public class TestMover {

View File

@ -40,7 +40,7 @@
import org.junit.rules.Timeout;
import org.mockito.Mockito;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import java.io.IOException;

View File

@ -56,7 +56,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;

View File

@ -96,7 +96,7 @@
import org.junit.Test;
import org.mockito.Mockito;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
public class TestCacheDirectives {
static final Logger LOG = LoggerFactory.getLogger(TestCacheDirectives.class);

View File

@ -95,7 +95,7 @@
import org.mockito.stubbing.Answer;
import com.google.common.base.Joiner;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

View File

@ -46,7 +46,7 @@
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
@RunWith(Parameterized.class)
public class TestEditLogAutoroll {

View File

@ -38,7 +38,7 @@
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.namenode;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;

View File

@ -59,7 +59,7 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.logging.impl.Log4JLogger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -30,7 +30,7 @@
import java.util.ArrayList;
import java.util.concurrent.TimeoutException;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.namenode;
import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import com.google.common.util.concurrent.Uninterruptibles;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;

View File

@ -30,7 +30,7 @@
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.test.GenericTestUtils;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.junit.After;
import org.junit.Before;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.namenode;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.impl.Log4JLogger;
import org.slf4j.Logger;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -37,7 +37,7 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import java.io.IOException;
import java.util.List;

View File

@ -31,7 +31,7 @@
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

View File

@ -49,7 +49,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* End-to-end test case for upgrade domain

View File

@ -59,7 +59,7 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
/**
* Static utility functions useful for testing HA.

View File

@ -27,7 +27,7 @@
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
-import com.google.common.base.Supplier;
+import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;

Some files were not shown because too many files have changed in this diff.