diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java index 367308d421..5b079e9405 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java @@ -78,7 +78,7 @@ /** * HttpFSServer implementation of the FileSystemAccess FileSystem. - *
+ *
* This implementation allows a user to access HDFS over HTTP via a HttpFSServer server.
*/
@InterfaceAudience.Private
@@ -223,7 +223,7 @@ public String getMethod() {
/**
* Convenience method that creates a HttpURLConnection
for the
* HttpFSServer file system operations.
- *
* This method performs and injects any needed authentication credentials
* via the {@link #getConnection(URL, String)} method
*
@@ -289,7 +289,7 @@ public HttpURLConnection run() throws Exception {
/**
* Convenience method that creates a HttpURLConnection
for the specified URL.
- *
* This method performs and injects any needed authentication credentials.
*
* @param url url to connect to.
@@ -371,7 +371,7 @@ protected int getDefaultPort() {
/**
* HttpFSServer subclass of the FSDataInputStream
.
- *
* This implementation does not support the
* PositionReadable
and Seekable
methods.
*/
@@ -414,8 +414,8 @@ public boolean seekToNewSource(long targetPos) throws IOException {
/**
* Opens an FSDataInputStream at the indicated Path.
- *
does not support the
+ *
+ * IMPORTANT: the returned FSDataInputStream
does not support the
* PositionReadable
and Seekable
methods.
*
* @param f the file name to open
@@ -434,7 +434,7 @@ public FSDataInputStream open(Path f, int bufferSize) throws IOException {
/**
* HttpFSServer subclass of the FSDataOutputStream
.
- *
+ *
* This implementation closes the underlying HTTP connection validating the Http connection status
* at closing time.
*/
@@ -516,7 +516,7 @@ private FSDataOutputStream uploadData(String method, Path f, Map
/**
* Opens an FSDataOutputStream at the indicated Path with write-progress
* reporting.
- *
+ *
* IMPORTANT: The Progressable
parameter is not used.
*
* @param f the file name to open.
@@ -549,7 +549,7 @@ public FSDataOutputStream create(Path f, FsPermission permission,
/**
* Append to an existing file (optional operation).
- *
+ *
* IMPORTANT: The Progressable
parameter is not used.
*
* @param f the existing file to be appended.
@@ -838,7 +838,7 @@ public boolean setReplication(Path src, short replication)
* Modify the ACL entries for a file.
*
* @param path Path to modify
- * @param aclSpec List describing modifications
+ * @param aclSpec describing modifications
* @throws IOException
*/
@Override
@@ -855,7 +855,7 @@ public void modifyAclEntries(Path path, List aclSpec)
/**
* Remove the specified ACL entries from a file
* @param path Path to modify
- * @param aclSpec List describing entries to remove
+ * @param aclSpec describing entries to remove
* @throws IOException
*/
@Override
@@ -900,7 +900,7 @@ public void removeAcl(Path path) throws IOException {
/**
* Set the ACLs for the given file
* @param path Path to modify
- * @param aclSpec List describing modifications, must include
+ * @param aclSpec describing modifications, must include
* entries for user, group, and others for compatibility
* with permission bits.
* @throws IOException
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpsFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpsFSFileSystem.java
index a696cd8c11..0a2e08dad0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpsFSFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpsFSFileSystem.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.fs.http.client;
/**
- * HttpFSServer implementation of the FileSystemAccess FileSystem for SSL.
- *
- * This implementation allows a user to access HDFS over HTTPS via a
- * HttpFSServer server.
+ * HttpFSServer implementation of the FileSystemAccess FileSystem for SSL.
+ *
+ * This implementation allows a user to access HDFS over HTTPS via a
+ * HttpFSServer server.
*/
public class HttpsFSFileSystem extends HttpFSFileSystem {
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
index 67df9a8e64..836b4ce9ff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
@@ -50,7 +50,7 @@ public class CheckUploadContentTypeFilter implements Filter {
/**
* Initializes the filter.
- *
+ *
* This implementation is a NOP.
*
* @param config filter configuration.
@@ -103,7 +103,7 @@ public void doFilter(ServletRequest request, ServletResponse response,
/**
* Destroys the filter.
- *
+ *
* This implementation is a NOP.
*/
@Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
index 8b332fc6e9..f0fe4c5596 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
@@ -44,7 +44,7 @@ public class HttpFSAuthenticationFilter
/**
* Returns the hadoop-auth configuration from HttpFSServer's configuration.
- *
+ *
* It returns all HttpFSServer's configuration properties prefixed with
* httpfs.authentication
. The httpfs.authentication
* prefix is removed from the returned property names.
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
index 3a8d9ada4e..aed6343123 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
@@ -43,7 +43,6 @@ public class HttpFSExceptionProvider extends ExceptionProvider {
/**
* Maps different exceptions thrown by HttpFSServer to HTTP status codes.
- *
*
* - SecurityException : HTTP UNAUTHORIZED
* - FileNotFoundException : HTTP NOT_FOUND
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
index f9eb454d9a..9103718346 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
@@ -85,7 +85,7 @@
/**
* Main class of HttpFSServer server.
- *
+ *
* The HttpFSServer
class uses Jersey JAX-RS to binds HTTP requests to the
* different operations.
*/
@@ -117,7 +117,7 @@ private T fsExecute(UserGroupInformation ugi, FileSystemAccess.FileSystemExe
/**
* Returns a filesystem instance. The filesystem instance is wired for release at the completion of
* the current Servlet request via the {@link FileSystemReleaseFilter}.
- *
+ *
* If a do-as user is specified, the current user must be a valid proxyuser, otherwise an
* AccessControlException
will be thrown.
*
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
index b7ae3015e3..66438b5f4a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
@@ -34,9 +34,9 @@
* HttpFSServer server, it is a javax.servlet.ServletContextListener
*
implementation that is wired in HttpFSServer's WAR
* WEB-INF/web.xml
.
- *
+ *
* It provides access to the server context via the singleton {@link #get}.
- *
+ *
* All the configuration is loaded from configuration properties prefixed
* with httpfs.
.
*/
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
index f974159c63..467ca23565 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
@@ -61,7 +61,7 @@ private XException(ERROR error, String message, Throwable cause) {
/**
* Creates an XException using another XException as cause.
- *
+ *
* The error code and error message are extracted from the cause.
*
* @param cause exception cause.
@@ -95,7 +95,7 @@ public ERROR getError() {
/**
* Creates a message using a error message template and arguments.
- *
+ *
* The template must be in JDK MessageFormat
syntax
* (using {#} positional parameters).
*
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
index 088f90058d..9d9ce7a111 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
@@ -44,11 +44,11 @@ public BaseService(String prefix) {
/**
* Initializes the service.
- *
+ *
* It collects all service properties (properties having the
* #SERVER#.#SERVICE#.
prefix). The property names are then
* trimmed from the #SERVER#.#SERVICE#.
prefix.
- *
+ *
* After collecting the service properties it delegates to the
* {@link #init()} method.
*
@@ -75,7 +75,7 @@ public final void init(Server server) throws ServiceException {
/**
* Post initializes the service. This method is called by the
* {@link Server} after all services of the server have been initialized.
- *
+ *
* This method does a NOP.
*
* @throws ServiceException thrown if the service could not be
@@ -88,7 +88,7 @@ public void postInit() throws ServiceException {
/**
* Destroy the services. This method is called once, when the
* {@link Server} owning the service is being destroyed.
- *
+ *
* This method does a NOP.
*/
@Override
@@ -98,7 +98,7 @@ public void destroy() {
/**
* Returns the service dependencies of this service. The service will be
* instantiated only if all the service dependencies are already initialized.
- *
+ *
* This method returns an empty array (size 0)
*
* @return an empty array (size 0).
@@ -110,7 +110,7 @@ public Class[] getServiceDependencies() {
/**
* Notification callback when the server changes its status.
- *
+ *
* This method returns an empty array (size 0)
*
* @param oldStatus old server status.
@@ -154,7 +154,7 @@ protected String getPrefixedName(String name) {
/**
* Returns the service configuration properties. Property
* names are trimmed off from its prefix.
- *
+ *
* The service configuration properties are all properties
* with names starting with #SERVER#.#SERVICE#.
* in the server configuration.
@@ -169,7 +169,7 @@ protected Configuration getServiceConfig() {
/**
* Initializes the server.
- *
+ *
* This method is called by {@link #init(Server)} after all service properties
* (properties prefixed with
*
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
index d083831c58..5c1bb4f727 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
@@ -42,40 +42,39 @@
/**
* A Server class provides standard configuration, logging and {@link Service}
* lifecycle management.
- *
+ *
* A Server normally has a home directory, a configuration directory, a temp
* directory and logs directory.
- *
+ *
* The Server configuration is loaded from 2 overlapped files,
* #SERVER#-default.xml
and #SERVER#-site.xml
. The
* default file is loaded from the classpath, the site file is loaded from the
* configuration directory.
- *
+ *
* The Server collects all configuration properties prefixed with
* #SERVER#
. The property names are then trimmed from the
* #SERVER#
prefix.
- *
+ *
* The Server log configuration is loaded from the
* #SERVICE#-log4j.properties
file in the configuration directory.
- *
+ *
* The lifecycle of server is defined in by {@link Server.Status} enum.
* When a server is created, its status is UNDEF, when being initialized it is
* BOOTING, once initialization is complete by default transitions to NORMAL.
* The #SERVER#.startup.status
configuration property can be used
* to specify a different startup status (NORMAL, ADMIN or HALTED).
- *
+ *
* Services classes are defined in the #SERVER#.services
and
* #SERVER#.services.ext
properties. They are loaded in order
* (services first, then services.ext).
- *
+ *
* Before initializing the services, they are traversed and duplicate service
* interfaces are removed from the service list. The last service using a given
* interface wins (this enables a simple override mechanism).
- *
+ *
* After the services have been resolved by interface de-duplication they are
* initialized in order. Once all services are initialized they are
* post-initialized (this enables late/conditional service bindings).
- *
*/
@InterfaceAudience.Private
public class Server {
@@ -152,7 +151,7 @@ public boolean isOperational() {
/**
* Creates a server instance.
- *
+ *
* The config, log and temp directories are all under the specified home directory.
*
* @param name server name.
@@ -177,9 +176,9 @@ public Server(String name, String homeDir, String configDir, String logDir, Stri
/**
* Creates a server instance.
- *
+ *
* The config, log and temp directories are all under the specified home directory.
- *
+ *
* It uses the provided configuration instead of loading it from the config dir.
*
* @param name server name.
@@ -192,7 +191,7 @@ public Server(String name, String homeDir, Configuration config) {
/**
* Creates a server instance.
- *
+ *
* It uses the provided configuration instead of loading it from the config dir.
*
* @param name server name.
@@ -250,9 +249,9 @@ public Status getStatus() {
/**
* Sets a new server status.
- *
+ *
* The status must be settable.
- *
+ *
* All services will be notified of the status change via the
* {@link Service#serverStatusChange(Server.Status, Server.Status)} method. If a service
* throws an exception during the notification, the server will be destroyed.
@@ -299,7 +298,7 @@ protected void ensureOperational() {
/**
* Convenience method that returns a resource as inputstream from the
* classpath.
- *
+ *
* It first attempts to use the Thread's context classloader and if not
* set it uses the ClassUtils
classloader.
*
@@ -319,7 +318,7 @@ static InputStream getResource(String name) {
/**
* Initializes the Server.
- *
+ *
* The initialization steps are:
*
* - It verifies the service home and temp directories exist
@@ -335,6 +334,7 @@ static InputStream getResource(String name) {
* - Initializes the services
* - Post-initializes the services
* - Sets the server startup status
+ *
*
* @throws ServerException thrown if the server could not be initialized.
*/
@@ -625,7 +625,7 @@ protected void destroyServices() {
/**
* Destroys the server.
- *
+ *
* All services are destroyed in reverse order of initialization, then the
* Log4j framework is shutdown.
*/
@@ -651,7 +651,7 @@ public String getName() {
/**
* Returns the server prefix for server configuration properties.
- *
+ *
* By default it is the server name.
*
* @return the prefix for server configuration properties.
@@ -733,10 +733,10 @@ public T get(Class serviceKlass) {
/**
* Adds a service programmatically.
- *
+ *
* If a service with the same interface exists, it will be destroyed and
* removed before the given one is initialized and added.
- *
+ *
* If an exception is thrown the server is destroyed.
*
* @param klass service class to add.
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
index 827bcff891..cf73979f64 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
@@ -33,7 +33,7 @@
/**
* The FileSystemReleaseFilter
releases back to the
* {@link FileSystemAccess} service a FileSystem
instance.
- *
+ *
* This filter is useful in situations where a servlet request
* is streaming out HDFS data and the corresponding filesystem
* instance have to be closed after the streaming completes.
@@ -44,7 +44,7 @@ public abstract class FileSystemReleaseFilter implements Filter {
/**
* Initializes the filter.
- *
+ *
* This implementation is a NOP.
*
* @param filterConfig filter configuration.
@@ -83,7 +83,7 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo
/**
* Destroys the filter.
- *
+ *
* This implementation is a NOP.
*/
@Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
index dd395f6749..64f4926f5a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
@@ -43,7 +43,7 @@ public class HostnameFilter implements Filter {
/**
* Initializes the filter.
- *
+ *
* This implementation is a NOP.
*
* @param config filter configuration.
@@ -56,7 +56,7 @@ public void init(FilterConfig config) throws ServletException {
/**
* Resolves the requester hostname and delegates the request to the chain.
- *
+ *
* The requester hostname is available via the {@link #get} method.
*
* @param request servlet request.
@@ -101,7 +101,7 @@ public static String get() {
/**
* Destroys the filter.
- *
+ *
* This implementation is a NOP.
*/
@Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
index 07b552d7ec..156cf64ab0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
@@ -33,7 +33,7 @@
/**
* Filter that sets request contextual information for the slf4j MDC.
- *
+ *
* It sets the following values:
*
* - hostname: if the {@link HostnameFilter} is present and configured
@@ -48,7 +48,7 @@ public class MDCFilter implements Filter {
/**
* Initializes the filter.
- *
+ *
* This implementation is a NOP.
*
* @param config filter configuration.
@@ -93,7 +93,7 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha
/**
* Destroys the filter.
- *
+ *
* This implementation is a NOP.
*/
@Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
index 9b0ea2a11e..cd1659383b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
@@ -75,21 +75,21 @@ protected ServerWebApp(String name, String homeDir, Configuration config) {
/**
* Constructor. Subclasses must have a default constructor specifying
* the server name.
- *
+ *
* The server name is used to resolve the Java System properties that define
* the server home, config, log and temp directories.
- *
+ *
* The home directory is looked in the Java System property
* #SERVER_NAME#.home.dir
.
- *
+ *
* The config directory is looked in the Java System property
* #SERVER_NAME#.config.dir
, if not defined it resolves to
* the #SERVER_HOME_DIR#/conf
directory.
- *
+ *
* The log directory is looked in the Java System property
* #SERVER_NAME#.log.dir
, if not defined it resolves to
* the #SERVER_HOME_DIR#/log
directory.
- *
+ *
* The temp directory is looked in the Java System property
* #SERVER_NAME#.temp.dir
, if not defined it resolves to
* the #SERVER_HOME_DIR#/temp
directory.
@@ -105,7 +105,7 @@ public ServerWebApp(String name) {
/**
* Returns the server home directory.
- *
+ *
* It is looked up in the Java System property
* #SERVER_NAME#.home.dir
.
*
@@ -159,15 +159,15 @@ public void contextInitialized(ServletContextEvent event) {
}
/**
- * Resolves the host & port InetSocketAddress the web server is listening to.
- *
+ * Resolves the host and port InetSocketAddress the web server is listening to.
+ *
* This implementation looks for the following 2 properties:
*
* - #SERVER_NAME#.http.hostname
* - #SERVER_NAME#.http.port
*
*
- * @return the host & port InetSocketAddress the web server is listening to.
+ * @return the host and port InetSocketAddress the web server is listening to.
* @throws ServerException thrown if any of the above 2 properties is not defined.
*/
protected InetSocketAddress resolveAuthority() throws ServerException {
@@ -217,7 +217,7 @@ public InetSocketAddress getAuthority() throws ServerException {
/**
* Sets an alternate hostname:port InetSocketAddress to use.
- *
+ *
* For testing purposes.
*
* @param authority alternate authority.
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
index a398e75845..31666e83e2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
@@ -26,7 +26,7 @@
/**
* Utility methods to check preconditions.
- *
+ *
* Commonly used for method arguments preconditions.
*/
@InterfaceAudience.Private
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
index 660eae0835..6611dd22fe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
@@ -90,7 +90,7 @@ public static Configuration resolve(Configuration conf) {
/**
* Create a configuration from an InputStream.
- *
+ *
* ERROR cannibalized from Configuration.loadResource()
.
*
* @param is inputstream to read the configuration from.
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
index 0f16a9b53e..e0f62002c7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
@@ -26,7 +26,7 @@
/**
* Class that contains all parsed JAX-RS parameters.
- *
+ *
* Instances are created by the {@link ParametersProvider} class.
*/
@InterfaceAudience.Private
@@ -63,7 +63,7 @@ public > V get(String name, Class klass) {
*
* @param name parameter name.
* @param klass class of the parameter, used for value casting.
- * @return List the values of the parameter.
+ * @return the values of the parameter.
*/
@SuppressWarnings("unchecked")
public > List getValues(String name, Class klass) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/PrivilegedNfsGatewayStarter.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/PrivilegedNfsGatewayStarter.java
index 98862eda5e..3934d7c494 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/PrivilegedNfsGatewayStarter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/PrivilegedNfsGatewayStarter.java
@@ -26,7 +26,7 @@
/**
* This class is used to allow the initial registration of the NFS gateway with
- * the system portmap daemon to come from a privileged (< 1024) port. This is
+ * the system portmap daemon to come from a privileged (< 1024) port. This is
* necessary on certain operating systems to work around this bug in rpcbind:
*
* Red Hat: https://bugzilla.redhat.com/show_bug.cgi?id=731542
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 9f3f9ee23b..1db358ffb0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -564,6 +564,9 @@ Release 2.7.0 - UNRELEASED
HDFS-7502. Fix findbugs warning in hdfs-nfs project.
(Brandon Li via wheat9)
+ HDFS-5578. [JDK8] Fix Javadoc errors caused by incorrect or illegal tags
+ in doc comments. (Andrew Purtell via wheat9)
+
Release 2.6.1 - UNRELEASED
INCOMPATIBLE CHANGES