HADOOP-17970. unguava: remove Preconditions from hdfs-projects modules (#3566)
parent 9cfd8d0a83
commit 62c86eaa0e
@@ -178,6 +178,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
           <excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>de.skuzzle.enforcer</groupId>
+            <artifactId>restrict-imports-enforcer-rule</artifactId>
+            <version>${restrict-imports.enforcer.version}</version>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                    <bannedImport>com.google.common.base.Preconditions</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>
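The rule added above runs in the process-sources phase, so the build fails fast whenever production or test code (includeTestCode is true) imports Preconditions from Guava, either directly or through the shaded org.apache.hadoop.thirdparty coordinates. A minimal sketch of the swap the rule enforces, assuming the org.apache.hadoop.util.Preconditions shim keeps Guava's static method names (which every Java hunk below relies on):

// Banned by the enforcer rule:
//   import com.google.common.base.Preconditions;
//   import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
// Allowed: the hadoop-common shim.
import org.apache.hadoop.util.Preconditions;

public class PreconditionsExample {
  public static void setReplication(short replication) {
    // Same contract as the Guava method: throws IllegalArgumentException
    // carrying the message when the condition is false.
    Preconditions.checkArgument(replication > 0,
        "Replication must be positive: " + replication);
  }
}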
@@ -43,7 +43,7 @@
 import org.apache.hadoop.util.ReflectionUtils;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -196,7 +196,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;
 
 /********************************************************
@@ -73,7 +73,7 @@
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Write.RECOVER_LEASE_ON_CLOSE_EXCEPTION_DEFAULT;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Write.RECOVER_LEASE_ON_CLOSE_EXCEPTION_KEY;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.CreateFlag;
@@ -19,7 +19,7 @@
 
 import org.apache.hadoop.net.DomainNameResolver;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.SignedBytes;
 import java.net.URISyntaxException;
@@ -145,7 +145,10 @@ public static byte[][] bytes2byteArray(byte[] bytes) {
    */
   public static byte[][] bytes2byteArray(byte[] bytes, int len,
       byte separator) {
-    Preconditions.checkPositionIndex(len, bytes.length);
+    if (len < 0 || len > bytes.length) {
+      throw new IndexOutOfBoundsException(
+          "Incorrect index [len, size] [" + len + ", " + bytes.length + "]");
+    }
     if (len == 0) {
       return new byte[][]{null};
     }
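Guava's Preconditions.checkPositionIndex(len, size) has no counterpart in the hadoop-common shim, so the hunk above inlines the bounds check instead of just swapping the import. A sketch of the equivalence, using a hypothetical helper for illustration (Guava additionally rejects a negative size, which cannot occur for an array length):

// Hypothetical helper, for illustration only: the inlined check above
// behaves like Guava's checkPositionIndex(len, size) for size >= 0.
static int checkPositionIndex(int index, int size) {
  if (index < 0 || index > size) {
    throw new IndexOutOfBoundsException(
        "Incorrect index [len, size] [" + index + ", " + size + "]");
  }
  return index; // Guava returns the validated index for chaining.
}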
@@ -22,7 +22,7 @@
 import org.apache.hadoop.ipc.RpcNoSuchMethodException;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.collections.list.TreeList;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -36,7 +36,7 @@
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
@@ -24,7 +24,7 @@
 import java.util.Map.Entry;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.LinkedListMultimap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
@@ -27,7 +27,7 @@
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringUtils;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 
 @InterfaceAudience.Private
@@ -31,7 +31,7 @@
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * The Hdfs implementation of {@link FSDataInputStream}.
@@ -28,7 +28,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSOutputStream;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * The Hdfs implementation of {@link FSDataOutputStream}.
@@ -75,7 +75,7 @@
 import org.apache.hadoop.util.Time;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.client.impl;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.ReadOption;
 import org.apache.hadoop.fs.StorageType;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.client.impl;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -25,7 +25,7 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 
 /**
@@ -26,7 +26,7 @@
 import org.apache.hadoop.fs.InvalidRequestException;
 import org.apache.hadoop.ipc.RemoteException;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.tracing.TraceScope;
 import org.apache.hadoop.tracing.Tracer;
 
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hdfs.protocol;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.ipc.RemoteException;
@@ -22,7 +22,7 @@
 import java.util.Comparator;
 import java.util.List;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.StorageType;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.protocol;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ReencryptionInfoProto;
@@ -20,7 +20,7 @@
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.hdfs.DFSUtilClient;
 
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ReencryptionInfoProto;
 
 /**
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PacketHeaderProto;
 import org.apache.hadoop.hdfs.util.ByteBufferOutputStream;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Shorts;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 import org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException;
@@ -31,7 +31,7 @@
 import org.apache.hadoop.util.DirectBufferPool;
 import org.apache.hadoop.io.IOUtils;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -28,7 +28,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader;
 import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -23,7 +23,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.SerializationFeature;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -26,7 +26,7 @@
 import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.net.unix.DomainSocketWatcher;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * DfsClientShm is a subclass of ShortCircuitShm which is used by the
@@ -44,7 +44,7 @@
 import org.apache.hadoop.net.unix.DomainSocketWatcher;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,7 +30,7 @@
 import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.util.PerformanceAdvisory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.cache.Cache;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
 
@@ -55,7 +55,7 @@
 import org.apache.hadoop.util.Waitable;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 import org.slf4j.Logger;
@@ -32,7 +32,7 @@
 import org.apache.hadoop.util.Time;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,7 +39,7 @@
 
 import sun.misc.Unsafe;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ComparisonChain;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 
@@ -26,7 +26,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.Time;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.DFSStripedOutputStream;
@@ -19,7 +19,7 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileChecksum;
@@ -138,7 +138,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
 
@@ -343,6 +343,38 @@
           <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>de.skuzzle.enforcer</groupId>
+            <artifactId>restrict-imports-enforcer-rule</artifactId>
+            <version>${restrict-imports.enforcer.version}</version>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                    <bannedImport>com.google.common.base.Preconditions</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 
@@ -73,7 +73,7 @@
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 
 import java.io.BufferedInputStream;
@@ -219,6 +219,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>de.skuzzle.enforcer</groupId>
+            <artifactId>restrict-imports-enforcer-rule</artifactId>
+            <version>${restrict-imports.enforcer.version}</version>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                    <bannedImport>com.google.common.base.Preconditions</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </profile>
@@ -29,7 +29,7 @@
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSClient;
@@ -19,7 +19,7 @@
 
 import java.util.Comparator;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * OffsetRange is the range of read/write request. A single point (e.g.,[5,5])
@@ -58,7 +58,7 @@
 import org.apache.hadoop.util.Time;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -31,7 +31,7 @@
 import org.apache.hadoop.util.Time;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 
 /**
@@ -30,7 +30,7 @@
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant.WriteStableHow;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * WriteCtx saves the context of one write request, such as request, channel,
@@ -305,6 +305,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
           </filesets>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>de.skuzzle.enforcer</groupId>
+            <artifactId>restrict-imports-enforcer-rule</artifactId>
+            <version>${restrict-imports.enforcer.version}</version>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                    <bannedImport>com.google.common.base.Preconditions</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 <profiles>
@@ -39,7 +39,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * Order the destinations based on available space. This resolver uses a
@@ -30,7 +30,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -447,6 +447,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
           </filesets>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>de.skuzzle.enforcer</groupId>
+            <artifactId>restrict-imports-enforcer-rule</artifactId>
+            <version>${restrict-imports.enforcer.version}</version>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                    <bannedImport>com.google.common.base.Preconditions</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 
@@ -110,7 +110,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.BlockingService;
 
 @InterfaceAudience.Private
@@ -310,7 +310,11 @@ public static String byteArray2PathString(final byte[][] components,
     // specifically not using StringBuilder to more efficiently build
     // string w/o excessive byte[] copies and charset conversions.
     final int range = offset + length;
-    Preconditions.checkPositionIndexes(offset, range, components.length);
+    if (offset < 0 || range < offset || range > components.length) {
+      throw new IndexOutOfBoundsException(
+          "Incorrect index [offset, range, size] ["
+              + offset + ", " + range + ", " + components.length + "]");
+    }
     if (length == 0) {
       return "";
     }
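The same replacement pattern appears here for Guava's checkPositionIndexes(start, end, size), which asserts 0 <= start <= end <= size; the shim does not provide it either, so the check is written out. The three disjuncts in the inlined condition map onto that contract one for one, as this hypothetical helper shows, for illustration only:

// Equivalent to the inlined check above, mirroring Guava's
// checkPositionIndexes(start, end, size) contract.
static void checkPositionIndexes(int start, int end, int size) {
  // start >= 0, end >= start (guards a negative length), end <= size.
  if (start < 0 || end < start || end > size) {
    throw new IndexOutOfBoundsException(
        "Incorrect index [offset, range, size] ["
            + start + ", " + end + ", " + size + "]");
  }
}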
@@ -57,7 +57,7 @@
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.Lists;
 import org.slf4j.LoggerFactory;
 
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.net;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.net;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.net.InnerNode;
@@ -35,7 +35,7 @@
 import org.apache.hadoop.hdfs.server.datanode.Replica;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.ByteString;
 import org.apache.hadoop.thirdparty.protobuf.CodedInputStream;
 import org.apache.hadoop.thirdparty.protobuf.CodedOutputStream;
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-
 import java.util.Date;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -28,7 +26,7 @@
 import org.apache.hadoop.util.IntrusiveCollection;
 import org.apache.hadoop.util.IntrusiveCollection.Element;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * Namenode class that tracks state related to a cached path.
@@ -63,7 +61,7 @@ public CacheDirective(long id, String path,
       short replication, long expiryTime) {
     Preconditions.checkArgument(id > 0);
     this.id = id;
-    this.path = checkNotNull(path);
+    this.path = Preconditions.checkNotNull(path);
     Preconditions.checkArgument(replication > 0);
     this.replication = replication;
     this.expiryTime = expiryTime;
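Beyond the import swap, call sites that used Guava's statically imported checkNotNull become qualified calls on the shim, which likewise throws NullPointerException on null and otherwise returns its argument, so validate-and-assign stays a one-liner. A self-contained sketch of the pattern, using a hypothetical PathHolder class for illustration and assuming the shim keeps Guava's return-the-argument signature:

import org.apache.hadoop.util.Preconditions;

class PathHolder {
  private final String path;

  PathHolder(String path) {
    // Throws NullPointerException when path is null; otherwise assigns it.
    this.path = Preconditions.checkNotNull(path);
  }
}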
@@ -61,7 +61,7 @@
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
@@ -36,7 +36,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;
@@ -58,7 +58,7 @@
 import org.apache.hadoop.util.StopWatch;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.FutureCallback;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
@@ -30,7 +30,7 @@
 import org.apache.hadoop.util.Timer;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.FutureCallback;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
@@ -23,7 +23,7 @@
 import org.apache.hadoop.util.StringUtils;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * Exception thrown when too many exceptions occur while gathering
@@ -61,7 +61,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.TextFormat;
 
 /**
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.SegmentStateProto;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ComparisonChain;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Booleans;
 
@@ -73,7 +73,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.thirdparty.protobuf.TextFormat;
 
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.qjournal.server;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.util.Lists;
@@ -46,7 +46,7 @@
 import org.apache.hadoop.util.Timer;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.HashMultiset;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Multiset;
 
@@ -50,7 +50,7 @@
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.ByteString;
 
 /**
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.balancer;
 
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkArgument;
 import static org.apache.hadoop.hdfs.protocol.BlockType.CONTIGUOUS;
 
 import java.io.IOException;
@@ -75,7 +74,7 @@
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /** <p>The balancer is a tool that balances disk space usage on an HDFS cluster
  * when some datanodes become full or when new empty nodes join the cluster.
@@ -998,7 +997,7 @@ static BalancerParameters parse(String[] args) {
       try {
         for(int i = 0; i < args.length; i++) {
           if ("-threshold".equalsIgnoreCase(args[i])) {
-            checkArgument(++i < args.length,
+            Preconditions.checkArgument(++i < args.length,
               "Threshold value is missing: args = " + Arrays.toString(args));
             try {
               double threshold = Double.parseDouble(args[i]);
@@ -1015,7 +1014,7 @@ static BalancerParameters parse(String[] args) {
             throw e;
           }
         } else if ("-policy".equalsIgnoreCase(args[i])) {
-          checkArgument(++i < args.length,
+          Preconditions.checkArgument(++i < args.length,
             "Policy value is missing: args = " + Arrays.toString(args));
           try {
             b.setBalancingPolicy(BalancingPolicy.parse(args[i]));
@@ -1036,7 +1035,7 @@ static BalancerParameters parse(String[] args) {
           i = processHostList(args, i, "source", sourceNodes);
           b.setSourceNodes(sourceNodes);
         } else if ("-blockpools".equalsIgnoreCase(args[i])) {
-          checkArgument(
+          Preconditions.checkArgument(
               ++i < args.length,
               "blockpools value is missing: args = "
                   + Arrays.toString(args));
@@ -1045,7 +1044,7 @@ static BalancerParameters parse(String[] args) {
               + blockpools.toString());
           b.setBlockpools(blockpools);
         } else if ("-idleiterations".equalsIgnoreCase(args[i])) {
-          checkArgument(++i < args.length,
+          Preconditions.checkArgument(++i < args.length,
               "idleiterations value is missing: args = " + Arrays
                   .toString(args));
           int maxIdleIteration = Integer.parseInt(args[i]);
@@ -1061,7 +1060,7 @@ static BalancerParameters parse(String[] args) {
           b.setRunAsService(true);
           LOG.info("Balancer will run as a long running service");
         } else if ("-hotBlockTimeInterval".equalsIgnoreCase(args[i])) {
-          checkArgument(++i < args.length,
+          Preconditions.checkArgument(++i < args.length,
               "hotBlockTimeInterval value is missing: args = "
                   + Arrays.toString(args));
           long hotBlockTimeInterval = Long.parseLong(args[i]);
@@ -1077,7 +1076,7 @@ static BalancerParameters parse(String[] args) {
               + Arrays.toString(args));
         }
       }
-      checkArgument(excludedNodes == null || includedNodes == null,
+      Preconditions.checkArgument(excludedNodes == null || includedNodes == null,
          "-exclude and -include options cannot be specified together.");
     } catch(RuntimeException e) {
       printUsage(System.err);
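All of the checkArgument calls in Balancer#parse above follow one scanning idiom: a flag that takes a value pre-increments the index and asserts that another argument exists before consuming it, and a final cross-flag check rejects combining -exclude with -include. A condensed sketch of that idiom, with a hypothetical -example flag for illustration:

import java.util.Arrays;
import org.apache.hadoop.util.Preconditions;

final class ArgScan {
  static String parseExampleFlag(String[] args) {
    for (int i = 0; i < args.length; i++) {
      if ("-example".equalsIgnoreCase(args[i])) {
        // Advance to the flag's value; fail fast if it is missing.
        Preconditions.checkArgument(++i < args.length,
            "Example value is missing: args = " + Arrays.toString(args));
        return args[i];
      }
    }
    return null;
  }
}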
@@ -85,7 +85,7 @@
 import org.apache.hadoop.util.Time;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /** Dispatching block replica moves between datanodes. */
 @InterfaceAudience.Private
@@ -31,7 +31,7 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.RateLimiter;
 import org.apache.hadoop.ha.HAServiceProtocol;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -29,7 +29,7 @@
 import java.util.Random;
 import java.util.Set;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -22,7 +22,7 @@
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockType;
@@ -126,7 +126,7 @@
 import org.apache.hadoop.util.Time;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -36,7 +36,7 @@
 import org.apache.hadoop.util.Daemon;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -25,7 +25,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.AddBlockFlag;
@@ -26,7 +26,7 @@
 import java.util.*;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.AddBlockFlag;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.util.Time;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
@@ -19,7 +19,7 @@
 
 import static org.apache.hadoop.hdfs.server.blockmanagement.CorruptReplicasMap.Reason;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.Block;
 
 /**
@@ -55,7 +55,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * Scans the namesystem, scheduling blocks to be cached as appropriate.
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.namenode.INode;
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkArgument;
 import static org.apache.hadoop.util.Time.monotonicNow;
 
 import java.util.Queue;
@@ -30,6 +29,7 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.Namesystem;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -105,7 +105,7 @@ void activate(Configuration conf) {
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY,
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_DEFAULT,
         TimeUnit.SECONDS);
-    checkArgument(intervalSecs >= 0, "Cannot set a negative " +
+    Preconditions.checkArgument(intervalSecs >= 0, "Cannot set a negative " +
         "value for " + DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY);
 
     int blocksPerInterval = conf.getInt(
@@ -122,7 +122,7 @@ void activate(Configuration conf) {
           DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_BLOCKS_PER_INTERVAL_KEY);
     }
 
-    checkArgument(blocksPerInterval > 0,
+    Preconditions.checkArgument(blocksPerInterval > 0,
         "Must set a positive value for "
         + DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_BLOCKS_PER_INTERVAL_KEY);
 
@@ -130,8 +130,8 @@ void activate(Configuration conf) {
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES,
         DFSConfigKeys
             .DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES_DEFAULT);
-    checkArgument(maxConcurrentTrackedNodes >= 0, "Cannot set a negative " +
-        "value for "
+    Preconditions.checkArgument(maxConcurrentTrackedNodes >= 0,
+        "Cannot set a negative value for "
         + DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES);
 
     Class cls = null;
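In activate() the swap keeps the fail-fast validation of decommission settings: the monitor interval and the tracked-node cap must be non-negative, and the blocks-per-interval budget strictly positive, so a bad value aborts startup instead of surfacing later in the monitor thread. A reduced sketch of that read-then-validate step (the key name and default are shown for illustration and may not match the released defaults):

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Preconditions;

final class DecommissionConfig {
  static long readIntervalSecs(Configuration conf) {
    // getTimeDuration parses plain numbers or suffixed values like "30s".
    long intervalSecs = conf.getTimeDuration(
        "dfs.namenode.decommission.interval", 30L, TimeUnit.SECONDS);
    Preconditions.checkArgument(intervalSecs >= 0,
        "Cannot set a negative value for dfs.namenode.decommission.interval");
    return intervalSecs;
  }
}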
@@ -24,7 +24,7 @@
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;
 
 import org.apache.hadoop.fs.StorageType;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.HashMultimap;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Multimap;
 import org.apache.hadoop.thirdparty.com.google.common.collect.UnmodifiableIterator;
@@ -31,7 +31,7 @@
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
 import org.apache.hadoop.util.StringUtils;
 
@@ -54,7 +54,7 @@
 import org.apache.hadoop.util.VersionInfo;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -54,7 +54,7 @@
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSUtilClient;
@@ -33,7 +33,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.slf4j.Logger;
 
@@ -48,7 +48,7 @@
 import org.apache.hadoop.util.Lists;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * Manages storage for the set of BlockPoolSlices which share a particular
@@ -19,7 +19,7 @@
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
Some files were not shown because too many files have changed in this diff.