HADOOP-17743. Replace Guava Lists usage by Hadoop's own Lists in hadoop-common, hadoop-tools and cloud-storage projects (#3072)

Viraj Jasani 2021-06-07 09:54:09 +05:30 committed by GitHub
parent 207c92753f
commit f4b24c68e7
60 changed files with 411 additions and 65 deletions

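The pattern is mechanical across all 60 files: every import of Guava's Lists — whether the shaded org.apache.hadoop.thirdparty copy or the direct com.google.common one — is replaced with hadoop-common's own org.apache.hadoop.util.Lists, and each affected module gains a maven-enforcer-plugin rule that bans the old imports. A minimal sketch of the swap, assuming hadoop-common is on the classpath and that org.apache.hadoop.util.Lists mirrors the Guava factory overloads exercised in this diff (class and variable names below are illustrative):

    import java.util.List;

    // Before (now rejected by the enforcer rule):
    //   import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
    // After:
    import org.apache.hadoop.util.Lists;

    public class ListsMigrationSketch {
      public static void main(String[] args) {
        // Call sites compile unchanged once the import is swapped.
        List<String> names = Lists.newArrayList("alpha", "beta"); // varargs overload
        List<String> copy = Lists.newArrayList(names);            // copy-of-Iterable overload
        System.out.println(copy); // prints [alpha, beta]
      }
    }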
View File

@@ -32,6 +32,43 @@
<hadoop.component>cloud-storage</hadoop.component>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>

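A note on the enforcer block above, which this commit repeats verbatim in each module's pom.xml: the de.skuzzle restrict-imports rule is bound to the process-sources phase, so the build fails before compilation when any source file imports either banned Lists class; includeTestCode set to true extends the ban to test sources, and the reason string is surfaced in the failure message. The rule version is taken from the shared ${restrict-imports.enforcer.version} property, presumably defined in the parent hadoop-project POM.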
View File

@@ -96,6 +96,38 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -92,6 +92,38 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>

View File

@@ -19,7 +19,6 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import com.obs.services.ObsClient;
import com.obs.services.exception.ObsException;
import com.obs.services.model.AbortMultipartUploadRequest;
@@ -53,6 +52,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIOException;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.util.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -647,6 +647,38 @@
</filesets>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -26,13 +26,13 @@
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.PerformanceAdvisory;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY;

View File

@@ -24,7 +24,7 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Objects;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
/**
* An AclStatus contains the ACL information of a specific file. AclStatus

View File

@@ -23,7 +23,7 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
/**
* AclUtil contains utility methods for manipulating ACLs.

View File

@@ -22,8 +22,6 @@
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -35,6 +33,7 @@
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.ScopedAclEntries;
import org.apache.hadoop.util.Lists;
/**
* Acl related operations

View File

@@ -25,10 +25,10 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -57,7 +57,6 @@
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import com.sun.jersey.spi.container.servlet.ServletContainer;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -82,6 +81,7 @@
import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
import org.apache.hadoop.security.ssl.FileMonitoringTimerTask;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;

View File

@@ -22,12 +22,11 @@
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.MetricsVisitor;
import org.apache.hadoop.util.Lists;
/**
* Helper class to build MBeanInfo from metrics records

View File

@@ -22,12 +22,13 @@
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.util.Lists;
import static org.apache.hadoop.metrics2.lib.Interns.*;
@InterfaceAudience.Private

View File

@@ -21,8 +21,6 @@
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsCollector;
@@ -30,6 +28,7 @@
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Time;
/**

View File

@@ -30,7 +30,6 @@
import java.util.TimerTask;
import javax.management.ObjectName;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.*;
@@ -58,6 +57,7 @@
import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
import org.apache.hadoop.metrics2.lib.MutableStat;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;

View File

@@ -22,11 +22,10 @@
import java.net.InetSocketAddress;
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.Lists;
/**
* Helpers to handle server addresses

View File

@@ -26,7 +26,6 @@
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Iterables;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
/**
* Simplified List implementation which stores elements as a list

View File

@@ -29,7 +29,6 @@
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -29,7 +29,6 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
/**

View File

@@ -19,8 +19,8 @@
package org.apache.hadoop.conf;
import java.util.function.Supplier;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange;
import org.junit.Test;

View File

@@ -33,9 +33,9 @@
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.Lists;
import org.mockito.Mockito;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -24,13 +24,12 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Shell;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
public class TestNodeFencer {
private HAServiceTarget MOCK_TARGET;

View File

@@ -21,9 +21,9 @@
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.util.Shell;
@@ -223,8 +223,8 @@ public void testCommandAbbreviation() {
*/
private static class LogAnswer implements Answer {
private static final List<String> DELEGATE_METHODS = Lists.asList("error",
new String[]{"warn", "info", "debug", "trace"});
private static final List<String> DELEGATE_METHODS = Arrays.asList(
"error", "warn", "info", "debug", "trace");
@Override
public Object answer(InvocationOnMock invocation) {

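The hunk above is one of the few places where the swap is not import-for-import: Guava's Lists.asList(first, rest) builds a list from a head element plus an array tail, and hadoop-common's Lists does not carry that helper, so the commit inlines all elements into plain java.util.Arrays.asList. A small sketch of the equivalence (the holder class is hypothetical; both forms yield a fixed-size list):

    import java.util.Arrays;
    import java.util.List;

    class DelegateMethodsSketch {
      // Guava (removed): head element plus array tail:
      //   Lists.asList("error", new String[] {"warn", "info", "debug", "trace"})
      // JDK replacement (added): same contents, fixed-size and backed by the array.
      static final List<String> DELEGATE_METHODS =
          Arrays.asList("error", "warn", "info", "debug", "trace");
    }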
View File

@@ -42,7 +42,6 @@
import org.junit.Test;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
public class TestApplicationClassLoader {

View File

@@ -26,8 +26,6 @@
import org.junit.Test;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
public class TestDirectBufferPool {
final org.apache.hadoop.util.DirectBufferPool pool = new org.apache.hadoop.util.DirectBufferPool();

View File

@@ -260,7 +260,38 @@
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -20,7 +20,6 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -29,6 +28,7 @@
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.service.ServiceStateException;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.ZKUtil;
import org.apache.zookeeper.Environment;
import org.apache.zookeeper.ZooDefs;

View File

@@ -467,6 +467,38 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -54,7 +54,7 @@
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -23,7 +23,7 @@
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -28,7 +28,7 @@
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.transfer.model.CopyResult;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -35,7 +35,6 @@
import java.util.List;
import com.amazonaws.services.dynamodbv2.xspec.ExpressionSpecBuilder;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -265,8 +264,8 @@ public int execute() throws ServiceLaunchException, IOException {
* @param <T> type of queue
*/
private <T> void pushAll(Deque<T> queue, List<T> entries) {
List<T> reversed = Lists.reverse(entries);
for (T t : reversed) {
Collections.reverse(entries);
for (T t : entries) {
queue.push(t);
}
}

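One behavioral wrinkle in the hunk above: Guava's Lists.reverse returns a reversed view and leaves its argument untouched, whereas java.util.Collections.reverse reverses the list in place, so pushAll now mutates the entries list handed to it — presumably acceptable here because the list is consumed immediately, but it is a semantic change rather than a pure import swap. A sketch of the difference, with illustrative values:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    class ReverseSketch {
      public static void main(String[] args) {
        List<Integer> entries = new ArrayList<>(Arrays.asList(1, 2, 3));

        // Guava (removed): returns a reversed *view*; 'entries' stays [1, 2, 3].
        //   List<Integer> reversed = Lists.reverse(entries);

        // JDK (added): reverses in place; 'entries' itself becomes [3, 2, 1].
        Collections.reverse(entries);
        System.out.println(entries); // prints [3, 2, 1]
      }
    }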
View File

@@ -67,7 +67,7 @@
import org.apache.hadoop.fs.s3a.impl.InternalConstants;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.MoreExecutors;
import org.slf4j.Logger;

View File

@@ -20,7 +20,7 @@
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.MultiObjectDeleteException;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.assertj.core.api.Assertions;
import org.junit.Assume;

View File

@@ -32,7 +32,7 @@
import org.apache.hadoop.fs.contract.s3a.S3AContract;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.assertj.core.api.Assertions;
import org.junit.Assume;
import org.junit.Test;

View File

@@ -25,7 +25,7 @@
import java.util.List;
import com.amazonaws.services.s3.model.PartETag;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.assertj.core.api.Assertions;
import org.junit.Test;
import org.slf4j.Logger;

View File

@@ -22,7 +22,7 @@
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.junit.Assert;
import org.junit.Test;

View File

@@ -44,7 +44,7 @@
import com.amazonaws.services.s3.model.MultipartUploadListing;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
import org.junit.AfterClass;
import org.junit.Assert;

View File

@@ -28,7 +28,7 @@
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.junit.After;
import org.junit.Test;

View File

@@ -25,7 +25,7 @@
import java.util.UUID;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Sets;
import org.assertj.core.api.Assertions;
import org.junit.BeforeClass;

View File

@@ -28,7 +28,7 @@
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.MultiObjectDeleteException;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.assertj.core.api.Assertions;
import org.junit.Before;
import org.junit.Test;

View File

@@ -46,7 +46,7 @@
import com.amazonaws.services.dynamodbv2.model.Tag;
import com.amazonaws.services.dynamodbv2.model.TagResourceRequest;
import com.amazonaws.services.dynamodbv2.model.UntagResourceRequest;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.assertj.core.api.Assertions;
import org.apache.commons.collections.CollectionUtils;

View File

@@ -21,11 +21,11 @@
import java.util.Comparator;
import java.util.List;
import org.apache.hadoop.util.Lists;
import org.junit.Test;
import org.apache.hadoop.fs.Path;
import static org.apache.hadoop.thirdparty.com.google.common.collect.Lists.newArrayList;
import static org.apache.hadoop.fs.s3a.s3guard.PathOrderComparators.TOPMOST_PATH_FIRST;
import static org.apache.hadoop.fs.s3a.s3guard.PathOrderComparators.TOPMOST_PATH_LAST;
import static org.assertj.core.api.Assertions.assertThat;
@@ -119,13 +119,13 @@ public void testSortOrderConstant() throws Throwable {
List<Path> sort1 = verifySorted(ROOT, DIR_A, DIR_B,
DIR_A_FILE_1, DIR_A_FILE_2,
DIR_B_FILE_3, DIR_B_FILE_4);
List<Path> sort2 = newArrayList(sort1);
List<Path> sort2 = Lists.newArrayList(sort1);
assertSortsTo(sort2, sort1, true);
}
@Test
public void testSortReverse() throws Throwable {
List<Path> sort1 = newArrayList(
List<Path> sort1 = Lists.newArrayList(
ROOT,
DIR_A,
DIR_B,
@@ -133,7 +133,7 @@ public void testSortReverse() throws Throwable {
DIR_A_FILE_2,
DIR_B_FILE_3,
DIR_B_FILE_4);
List<Path> expected = newArrayList(
List<Path> expected = Lists.newArrayList(
DIR_B_FILE_4,
DIR_B_FILE_3,
DIR_A_FILE_2,
@@ -146,8 +146,8 @@
private List<Path> verifySorted(Path... paths) {
List<Path> original = newArrayList(paths);
List<Path> sorted = newArrayList(paths);
List<Path> original = Lists.newArrayList(paths);
List<Path> sorted = Lists.newArrayList(paths);
assertSortsTo(original, sorted, true);
return sorted;
}
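The test hunks above show the same migration applied to a static import: the bare newArrayList(...) calls, previously resolved against the shaded Guava Lists via import static, become explicit Lists.newArrayList(...) calls against org.apache.hadoop.util.Lists, using the same varargs and copy overloads sketched after the commit header.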

View File

@@ -101,6 +101,39 @@
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>

View File

@@ -22,7 +22,7 @@
import java.util.List;
import java.util.UUID;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.fs.azurebfs.oauth2.IdentityTransformer;
import org.apache.hadoop.fs.permission.AclEntry;
import org.junit.Test;

View File

@@ -22,7 +22,7 @@
import java.lang.reflect.Field;
import java.util.List;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.junit.Assume;
import org.junit.Test;
import org.mockito.Mockito;

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.azurebfs;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import java.io.FileNotFoundException;
import java.util.List;

View File

@@ -36,7 +36,7 @@
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.assertj.core.api.Assertions;
import org.junit.Assume;
import org.junit.Test;

View File

@@ -223,6 +223,38 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -41,7 +41,7 @@
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.thirdparty.com.google.common.base.Objects;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
/**

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.tools;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -43,7 +43,7 @@
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import java.io.FileNotFoundException;
import java.io.IOException;

View File

@@ -137,6 +137,38 @@
</environmentVariables>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.tools.dynamometer;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
import java.io.IOException;
import java.nio.ByteBuffer;

View File

@@ -21,7 +21,7 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import java.util.Optional;
import java.util.function.Supplier;
import org.apache.hadoop.tools.dynamometer.workloadgenerator.audit.AuditReplayMapper;

View File

@@ -83,6 +83,38 @@
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.tools.dynamometer.workloadgenerator;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;

View File

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.tools.dynamometer.workloadgenerator.audit;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import java.util.Optional;
import java.util.function.Function;

View File

@@ -70,6 +70,38 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
<bannedImport>com.google.common.collect.Lists</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.util.Lists;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;