diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
index 70994b3e4f..5e974c8e13 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
@@ -404,7 +404,7 @@ public Response post(InputStream is,
     String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
     switch (op.value()) {
       case APPEND: {
-        boolean hasData = params.get(DataParam.NAME, DataParam.class);
+        Boolean hasData = params.get(DataParam.NAME, DataParam.class);
         if (!hasData) {
           response = Response.temporaryRedirect(
             createUploadRedirectionURL(uriInfo,
@@ -478,7 +478,7 @@ public Response put(InputStream is,
     String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
     switch (op.value()) {
       case CREATE: {
-        boolean hasData = params.get(DataParam.NAME, DataParam.class);
+        Boolean hasData = params.get(DataParam.NAME, DataParam.class);
         if (!hasData) {
           response = Response.temporaryRedirect(
             createUploadRedirectionURL(uriInfo,
@@ -486,11 +486,11 @@ public Response put(InputStream is,
         } else {
           Short permission = params.get(PermissionParam.NAME,
                                         PermissionParam.class);
-          boolean override = params.get(OverwriteParam.NAME,
+          Boolean override = params.get(OverwriteParam.NAME,
                                         OverwriteParam.class);
-          short replication = params.get(ReplicationParam.NAME,
+          Short replication = params.get(ReplicationParam.NAME,
                                          ReplicationParam.class);
-          long blockSize = params.get(BlockSizeParam.NAME,
+          Long blockSize = params.get(BlockSizeParam.NAME,
                                       BlockSizeParam.class);
           FSOperations.FSCreate command =
             new FSOperations.FSCreate(is, path, permission, override,
@@ -543,7 +543,7 @@ public Response put(InputStream is,
         break;
       }
       case SETREPLICATION: {
-        short replication = params.get(ReplicationParam.NAME,
+        Short replication = params.get(ReplicationParam.NAME,
                                        ReplicationParam.class);
         FSOperations.FSSetReplication command =
           new FSOperations.FSSetReplication(path, replication);
@@ -553,9 +553,9 @@ public Response put(InputStream is,
         break;
       }
       case SETTIMES: {
-        long modifiedTime = params.get(ModifiedTimeParam.NAME,
+        Long modifiedTime = params.get(ModifiedTimeParam.NAME,
                                        ModifiedTimeParam.class);
-        long accessTime = params.get(AccessTimeParam.NAME,
+        Long accessTime = params.get(AccessTimeParam.NAME,
                                      AccessTimeParam.class);
         FSOperations.FSSetTimes command =
           new FSOperations.FSSetTimes(path, modifiedTime, accessTime);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index bd310974c9..c9e38e6222 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -404,6 +404,10 @@ Branch-2 ( Unreleased changes )
 
     HDFS-3491. HttpFs does not set permissions correctly (tucu)
 
+    HDFS-3580. incompatible types; no instance(s) of type variable(s) V exist
+    so that V conforms to boolean compiling HttpFSServer.java with OpenJDK
+    (adi2 via tucu)
+
 Release 2.0.0-alpha - 05-23-2012
 
   INCOMPATIBLE CHANGES
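
Background on the fix: the compile error in the JIRA title comes from Java generic type inference. The type variable V in Parameters.get(name, class) can only resolve to a reference type, and the OpenJDK javac in use at the time would not infer V when the call result was assigned directly to a primitive local (boolean, short, long); declaring the locals with the boxed types (Boolean, Short, Long) removes the primitive from the inference step, while auto-unboxing keeps the surrounding logic unchanged. The sketch below reproduces the pattern with hypothetical stand-in classes; Param, DataParam, Params and its get method are simplified assumptions for illustration, not the real org.apache.hadoop.lib.wsrs classes.

    import java.util.HashMap;
    import java.util.Map;

    public class BoxedInferenceSketch {

      // Hypothetical stand-ins for HttpFS's request-parameter classes.
      abstract static class Param<T> {
        private final T value;
        Param(T value) { this.value = value; }
        T value() { return value; }
      }

      static class DataParam extends Param<Boolean> {
        static final String NAME = "data";
        DataParam(Boolean value) { super(value); }
      }

      static class Params {
        private final Map<String, Param<?>> map = new HashMap<String, Param<?>>();

        void add(String name, Param<?> param) { map.put(name, param); }

        // Accessor with the same shape as the params.get(NAME, class) calls in
        // the diff: the type variable V is carried by the Param subclass.
        <V> V get(String name, Class<? extends Param<V>> klass) {
          return klass.cast(map.get(name)).value();
        }
      }

      public static void main(String[] args) {
        Params params = new Params();
        params.add(DataParam.NAME, new DataParam(Boolean.TRUE));

        // Original declaration in HttpFSServer.java:
        //   boolean hasData = params.get(DataParam.NAME, DataParam.class);
        // A type variable can never stand for a primitive type, so the OpenJDK
        // javac cited in HDFS-3580 refused to infer V for the primitive target
        // and reported "incompatible types; no instance(s) of type variable(s)
        // V exist so that V conforms to boolean". Using the boxed type keeps
        // the primitive out of the inference step.
        Boolean hasData = params.get(DataParam.NAME, DataParam.class);

        if (!hasData) {  // unboxing still happens automatically at the use site
          System.out.println("no data: would redirect the client for upload");
        } else {
          System.out.println("data present: would write it to the file");
        }
      }
    }

Assuming the parameter framework supplies default values for absent parameters (which the existing primitive-typed code already relied on), the boxed locals are never null here, so the implicit unboxing in expressions like !hasData behaves exactly as before.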