diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cf1037a336..44e559f7bd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -69,6 +69,9 @@ Trunk (unreleased changes)
     HDFS-2322. the build fails in Windows because commons-daemon TAR cannot be
     fetched. (tucu)
 
+    HDFS-2427. Change the default permission in webhdfs to 755 and add range
+    check/validation for all parameters. (szetszwo)
+
   BUG FIXES
 
     HDFS-2287. TestParallelRead has a small off-by-one bug. (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index b5152b4558..1dd3c0fc26 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -66,7 +66,7 @@ import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
-import org.apache.hadoop.hdfs.web.resources.DstPathParam;
+import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.GroupParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
@@ -201,8 +201,8 @@ public Response put(
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
           final PutOpParam op,
-      @QueryParam(DstPathParam.NAME) @DefaultValue(DstPathParam.DEFAULT)
-          final DstPathParam dstPath,
+      @QueryParam(DestinationParam.NAME) @DefaultValue(DestinationParam.DEFAULT)
+          final DestinationParam destination,
       @QueryParam(OwnerParam.NAME) @DefaultValue(OwnerParam.DEFAULT)
           final OwnerParam owner,
       @QueryParam(GroupParam.NAME) @DefaultValue(GroupParam.DEFAULT)
@@ -227,7 +227,7 @@ public Response put(
 
     if (LOG.isTraceEnabled()) {
       LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", dstPath, owner, group, permission,
+          + Param.toSortedString(", ", destination, owner, group, permission,
              overwrite, bufferSize, replication, blockSize,
              modificationTime, accessTime, renameOptions));
     }
@@ -264,11 +264,11 @@ public Response run() throws IOException, URISyntaxException {
     {
       final EnumSet<Options.Rename> s = renameOptions.getValue();
       if (s.isEmpty()) {
-        final boolean b = np.rename(fullpath, dstPath.getValue());
+        final boolean b = np.rename(fullpath, destination.getValue());
         final String js = JsonUtil.toJsonString("boolean", b);
         return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
       } else {
-        np.rename2(fullpath, dstPath.getValue(),
+        np.rename2(fullpath, destination.getValue(),
             s.toArray(new Options.Rename[s.size()]));
         return Response.ok().type(MediaType.APPLICATION_JSON).build();
       }
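For orientation, the handler change above means a rename request now carries op=RENAME together with a "destination" query parameter instead of "dstpath". A minimal client-side sketch, not part of the patch; the namenode host and port are placeholders, and the exact query-string layout is inferred from the handler rather than spelled out by this change:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WebHdfsRenameExample {
      public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        // WebHdfsFileSystem (scheme "webhdfs") turns this call into an HTTP PUT
        // whose query string carries op=RENAME and destination=<absolute path>.
        final FileSystem fs =
            FileSystem.get(URI.create("webhdfs://namenode:50070/"), conf);
        final boolean renamed =
            fs.rename(new Path("/user/foo/a.txt"), new Path("/user/foo/b.txt"));
        System.out.println("renamed = " + renamed);
      }
    }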
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index b8e7e7e174..acf7b10fb5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -59,7 +59,7 @@ import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
-import org.apache.hadoop.hdfs.web.resources.DstPathParam;
+import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.GroupParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
@@ -292,7 +292,7 @@ public boolean rename(final Path src, final Path dst) throws IOException {
     statistics.incrementWriteOps(1);
     final HttpOpParam.Op op = PutOpParam.Op.RENAME;
     final Map json = run(op, src,
-        new DstPathParam(makeQualified(dst).toUri().getPath()));
+        new DestinationParam(makeQualified(dst).toUri().getPath()));
     return (Boolean)json.get("boolean");
   }
 
@@ -302,7 +302,7 @@ public void rename(final Path src, final Path dst,
       final Options.Rename... options) throws IOException {
     statistics.incrementWriteOps(1);
     final HttpOpParam.Op op = PutOpParam.Op.RENAME;
-    run(op, src, new DstPathParam(makeQualified(dst).toUri().getPath()),
+    run(op, src, new DestinationParam(makeQualified(dst).toUri().getPath()),
         new RenameOptionSetParam(options));
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java
index 8d82131c70..9bc938dee5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java
@@ -31,7 +31,7 @@ public class AccessTimeParam extends LongParam {
    * @param value the parameter value.
    */
   public AccessTimeParam(final Long value) {
-    super(DOMAIN, value);
+    super(DOMAIN, value, -1L, null);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
index 9611496807..4076746e34 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
@@ -36,7 +36,7 @@ public class BlockSizeParam extends LongParam {
    * @param value the parameter value.
    */
   public BlockSizeParam(final Long value) {
-    super(DOMAIN, value);
+    super(DOMAIN, value, 1L, null);
   }
 
   /**
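The client side passes the same renamed parameter, and the size parameters above keep their null-means-default behaviour: a request without a blocksize leaves the parameter value null, so the new range check is skipped and the server falls back to the configured default. A small sketch of that fallback; the example class is hypothetical, while the constructor and getValue(conf) call are the ones exercised by TestParam further down:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;

    public class BlockSizeFallbackExample {
      public static void main(String[] args) {
        final Configuration conf = new Configuration();
        // No blocksize in the request: the parameter value stays null (which is
        // why checkRange skips null), and getValue(conf) falls back to the
        // cluster setting named by DFSConfigKeys.DFS_BLOCK_SIZE_KEY.
        final BlockSizeParam p = new BlockSizeParam(BlockSizeParam.DEFAULT);
        System.out.println("effective block size = " + p.getValue(conf));
      }
    }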
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
index 148834b102..376d7d8ef0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
@@ -34,7 +34,7 @@ public class BufferSizeParam extends IntegerParam {
    * @param value the parameter value.
    */
   public BufferSizeParam(final Integer value) {
-    super(DOMAIN, value);
+    super(DOMAIN, value, 1, null);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DestinationParam.java
similarity index 69%
rename from hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java
rename to hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DestinationParam.java
index 5fa52456f9..67597385da 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DestinationParam.java
@@ -20,20 +20,31 @@
 import org.apache.hadoop.fs.Path;
 
 /** Destination path parameter. */
-public class DstPathParam extends StringParam {
+public class DestinationParam extends StringParam {
   /** Parameter name. */
-  public static final String NAME = "dstpath";
+  public static final String NAME = "destination";
   /** Default parameter value. */
   public static final String DEFAULT = "";
 
   private static final Domain DOMAIN = new Domain(NAME, null);
 
+  private static String validate(final String str) {
+    if (str == null || str.equals(DEFAULT)) {
+      return null;
+    }
+    if (!str.startsWith(Path.SEPARATOR)) {
+      throw new IllegalArgumentException("Invalid parameter value: " + NAME
+          + " = \"" + str + "\" is not an absolute path.");
+    }
+    return new Path(str).toUri().getPath();
+  }
+
   /**
    * Constructor.
    * @param str a string representation of the parameter value.
    */
-  public DstPathParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT)? null: new Path(str).toUri().getPath());
+  public DestinationParam(final String str) {
+    super(DOMAIN, validate(str));
   }
 
   @Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/IntegerParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/IntegerParam.java
index 5e89087610..b80b1a254a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/IntegerParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/IntegerParam.java
@@ -19,8 +19,24 @@
 
 /** Integer parameter. */
 abstract class IntegerParam extends Param<Integer, IntegerParam.Domain> {
-  IntegerParam(final Domain domain, final Integer value) {
+  IntegerParam(final Domain domain, final Integer value,
+      final Integer min, final Integer max) {
     super(domain, value);
+    checkRange(min, max);
+  }
+
+  private void checkRange(final Integer min, final Integer max) {
+    if (value == null) {
+      return;
+    }
+    if (min != null && value < min) {
+      throw new IllegalArgumentException("Invalid parameter range: " + getName()
+          + " = " + domain.toString(value) + " < " + domain.toString(min));
+    }
+    if (max != null && value > max) {
+      throw new IllegalArgumentException("Invalid parameter range: " + getName()
+          + " = " + domain.toString(value) + " > " + domain.toString(max));
+    }
   }
 
   @Override
@@ -49,7 +65,12 @@ public String getDomain() {
 
   @Override
   Integer parse(final String str) {
-    return NULL.equals(str)? null: Integer.parseInt(str, radix);
+    try{
+      return NULL.equals(str)? null: Integer.parseInt(str, radix);
+    } catch(NumberFormatException e) {
+      throw new IllegalArgumentException("Failed to parse \"" + str
+          + "\" as a radix-" + radix + " integer.", e);
+    }
   }
 
   /** Convert an Integer to a String. */
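IntegerParam now carries the shared checkRange(min, max) logic, so a parameter class opts into validation simply by passing bounds to the superclass constructor, as BufferSizeParam does with (1, null). A hypothetical subclass sketch, not part of the patch; it assumes IntegerParam.Domain has a (name, radix) constructor analogous to the ShortParam.Domain that PermissionParam uses:

    package org.apache.hadoop.hdfs.web.resources;

    // Hypothetical parameter, shown only to illustrate the new (min, max)
    // constructor contract of IntegerParam.
    public class ExampleCountParam extends IntegerParam {
      /** Parameter name (made up for this example). */
      public static final String NAME = "examplecount";

      private static final Domain DOMAIN = new Domain(NAME, 10);

      /** Values outside [1, 1000] make checkRange throw IllegalArgumentException. */
      public ExampleCountParam(final Integer value) {
        super(DOMAIN, value, 1, 1000);
      }

      @Override
      public String getName() {
        return NAME;
      }
    }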
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LengthParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LengthParam.java
index 90d4f6289d..6c59ee5143 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LengthParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LengthParam.java
@@ -31,7 +31,7 @@ public class LengthParam extends LongParam {
    * @param value the parameter value.
    */
   public LengthParam(final Long value) {
-    super(DOMAIN, value);
+    super(DOMAIN, value, 0L, null);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java
index 8a3e0f5e41..023402cfe0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java
@@ -19,8 +19,23 @@
 
 /** Long parameter. */
 abstract class LongParam extends Param<Long, LongParam.Domain> {
-  LongParam(final Domain domain, final Long value) {
+  LongParam(final Domain domain, final Long value, final Long min, final Long max) {
     super(domain, value);
+    checkRange(min, max);
+  }
+
+  private void checkRange(final Long min, final Long max) {
+    if (value == null) {
+      return;
+    }
+    if (min != null && value < min) {
+      throw new IllegalArgumentException("Invalid parameter range: " + getName()
+          + " = " + domain.toString(value) + " < " + domain.toString(min));
+    }
+    if (max != null && value > max) {
+      throw new IllegalArgumentException("Invalid parameter range: " + getName()
+          + " = " + domain.toString(value) + " > " + domain.toString(max));
+    }
   }
 
   @Override
@@ -49,7 +64,12 @@ public String getDomain() {
 
   @Override
   Long parse(final String str) {
-    return NULL.equals(str)? null: Long.parseLong(str, radix);
+    try {
+      return NULL.equals(str)? null: Long.parseLong(str, radix);
+    } catch(NumberFormatException e) {
+      throw new IllegalArgumentException("Failed to parse \"" + str
+          + "\" as a radix-" + radix + " long integer.", e);
+    }
   }
 
   /** Convert a Short to a String. */
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java
index a0e38a97e7..59911d70b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java
@@ -31,7 +31,7 @@ public class ModificationTimeParam extends LongParam {
    * @param value the parameter value.
    */
   public ModificationTimeParam(final Long value) {
-    super(DOMAIN, value);
+    super(DOMAIN, value, -1L, null);
   }
 
   /**
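LongParam gets the same constructor bounds and the same parse() wrapping, so malformed numbers surface as IllegalArgumentException with a readable message instead of a bare NumberFormatException. The -1L lower bound on the two time parameters deserves a note: -1 is the value that the HDFS setTimes contract treats as "leave this timestamp unchanged", so it must stay legal while anything more negative now fails fast. A short illustration; the example class name is made up and the sentinel semantics come from the setTimes contract rather than from this patch:

    import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
    import org.apache.hadoop.hdfs.web.resources.ModificationTimeParam;

    public class TimeSentinelExample {
      public static void main(String[] args) {
        // -1 means "do not change this timestamp" downstream, so it passes
        // the new range check.
        new AccessTimeParam(-1L);
        new ModificationTimeParam(-1L);

        try {
          // Anything below the sentinel is meaningless and is rejected early.
          new AccessTimeParam(-2L);
        } catch (IllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }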
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OffsetParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OffsetParam.java
index 8b3654dbd8..6973787847 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OffsetParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OffsetParam.java
@@ -31,7 +31,7 @@ public class OffsetParam extends LongParam {
    * @param value the parameter value.
    */
   public OffsetParam(final Long value) {
-    super(DOMAIN, value);
+    super(DOMAIN, value, 0L, null);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
index 264e60226b..d283423fa0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
@@ -27,13 +27,15 @@ public class PermissionParam extends ShortParam {
   public static final String DEFAULT = NULL;
 
   private static final Domain DOMAIN = new Domain(NAME, 8);
+
+  private static final short DEFAULT_PERMISSION = 0755;
 
   /**
    * Constructor.
    * @param value the parameter value.
    */
   public PermissionParam(final FsPermission value) {
-    super(DOMAIN, value == null? null: value.toShort());
+    super(DOMAIN, value == null? null: value.toShort(), null, null);
   }
 
   /**
@@ -41,7 +43,7 @@ public PermissionParam(final FsPermission value) {
    * @param str a string representation of the parameter value.
    */
   public PermissionParam(final String str) {
-    super(DOMAIN, DOMAIN.parse(str));
+    super(DOMAIN, DOMAIN.parse(str), (short)0, (short)01777);
   }
 
   @Override
@@ -51,7 +53,7 @@ public String getName() {
 
   /** @return the represented FsPermission. */
   public FsPermission getFsPermission() {
-    final Short mode = getValue();
-    return mode == null? FsPermission.getDefault(): new FsPermission(mode);
+    final Short v = getValue();
+    return new FsPermission(v != null? v: DEFAULT_PERMISSION);
   }
 }
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ReplicationParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ReplicationParam.java
index 1eee7ee34d..797709abde 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ReplicationParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ReplicationParam.java
@@ -36,7 +36,7 @@ public class ReplicationParam extends ShortParam {
    * @param value the parameter value.
    */
   public ReplicationParam(final Short value) {
-    super(DOMAIN, value);
+    super(DOMAIN, value, (short)1, null);
   }
 
   /**
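PermissionParam is where the headline change lands: with no permission supplied, getFsPermission() now returns 0755 rather than FsPermission.getDefault(), and supplied values are parsed as octal and confined to [0, 1777]. A compact illustration; the example class is hypothetical, and the behaviour follows directly from the code above and is also asserted by TestParam below:

    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.hdfs.web.resources.PermissionParam;

    public class PermissionDefaultExample {
      public static void main(String[] args) {
        // No permission supplied: the server-side default is now 755.
        final PermissionParam unset = new PermissionParam(PermissionParam.DEFAULT);
        System.out.println(unset.getFsPermission());   // rwxr-xr-x

        // Values are interpreted as octal and must lie in [0, 1777].
        final PermissionParam sticky = new PermissionParam("1777");
        System.out.println(sticky.getFsPermission());  // rwxrwxrwt

        try {
          new PermissionParam("2000");                  // above the 01777 upper bound
        } catch (IllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }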
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ShortParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ShortParam.java
index af3e72f687..c1749cf18e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ShortParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ShortParam.java
@@ -19,8 +19,24 @@
 
 /** Short parameter. */
 abstract class ShortParam extends Param<Short, ShortParam.Domain> {
-  ShortParam(final Domain domain, final Short value) {
+  ShortParam(final Domain domain, final Short value,
+      final Short min, final Short max) {
     super(domain, value);
+    checkRange(min, max);
+  }
+
+  private void checkRange(final Short min, final Short max) {
+    if (value == null) {
+      return;
+    }
+    if (min != null && value < min) {
+      throw new IllegalArgumentException("Invalid parameter range: " + getName()
+          + " = " + domain.toString(value) + " < " + domain.toString(min));
+    }
+    if (max != null && value > max) {
+      throw new IllegalArgumentException("Invalid parameter range: " + getName()
+          + " = " + domain.toString(value) + " > " + domain.toString(max));
+    }
   }
 
   @Override
@@ -49,7 +65,12 @@ public String getDomain() {
 
   @Override
   Short parse(final String str) {
-    return NULL.equals(str)? null: Short.parseShort(str, radix);
+    try {
+      return NULL.equals(str)? null: Short.parseShort(str, radix);
+    } catch(NumberFormatException e) {
+      throw new IllegalArgumentException("Failed to parse \"" + str
+          + "\" as a radix-" + radix + " short integer.", e);
+    }
  }
 
   /** Convert a Short to a String. */
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
new file mode 100644
index 0000000000..9834cb74a4
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
@@ -0,0 +1,227 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestParam {
+  public static final Log LOG = LogFactory.getLog(TestParam.class);
+
+  final Configuration conf = new Configuration();
+
+  @Test
+  public void testAccessTimeParam() {
+    final AccessTimeParam p = new AccessTimeParam(AccessTimeParam.DEFAULT);
+    Assert.assertEquals(-1L, p.getValue().longValue());
+
+    new AccessTimeParam(-1L);
+
+    try {
+      new AccessTimeParam(-2L);
+      Assert.fail();
+    } catch(IllegalArgumentException e) {
+      LOG.info("EXPECTED: " + e);
+    }
+  }
+
+  @Test
+  public void testBlockSizeParam() {
+    final BlockSizeParam p = new BlockSizeParam(BlockSizeParam.DEFAULT);
+    Assert.assertEquals(null, p.getValue());
+    Assert.assertEquals(
+        conf.getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY,
+            DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT),
+        p.getValue(conf));
+
+    new BlockSizeParam(1L);
+
+    try {
+      new BlockSizeParam(0L);
+      Assert.fail();
+    } catch(IllegalArgumentException e) {
+      LOG.info("EXPECTED: " + e);
+    }
+  }
+
+  @Test
+  public void testBufferSizeParam() {
+    final BufferSizeParam p = new BufferSizeParam(BufferSizeParam.DEFAULT);
+    Assert.assertEquals(null, p.getValue());
+    Assert.assertEquals(
+        conf.getInt(CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
+            CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT),
+        p.getValue(conf));
+
+    new BufferSizeParam(1);
+
+    try {
+      new BufferSizeParam(0);
+      Assert.fail();
+    } catch(IllegalArgumentException e) {
+      LOG.info("EXPECTED: " + e);
+    }
+  }
+
+  @Test
+  public void testDelegationParam() {
+    final DelegationParam p = new DelegationParam(DelegationParam.DEFAULT);
+    Assert.assertEquals(null, p.getValue());
+  }
+
+  @Test
+  public void testDestinationParam() {
+    final DestinationParam p = new DestinationParam(DestinationParam.DEFAULT);
+    Assert.assertEquals(null, p.getValue());
+
+    new DestinationParam("/abc");
+
+    try {
+      new DestinationParam("abc");
+      Assert.fail();
+    } catch(IllegalArgumentException e) {
+      LOG.info("EXPECTED: " + e);
+    }
+  }
+
+  @Test
+  public void testGroupParam() {
+    final GroupParam p = new GroupParam(GroupParam.DEFAULT);
+    Assert.assertEquals(null, p.getValue());
+  }
+
+  @Test
+  public void testModificationTimeParam() {
+    final ModificationTimeParam p = new ModificationTimeParam(ModificationTimeParam.DEFAULT);
+    Assert.assertEquals(-1L, p.getValue().longValue());
+
+    new ModificationTimeParam(-1L);
+
+    try {
+      new ModificationTimeParam(-2L);
+      Assert.fail();
+    } catch(IllegalArgumentException e) {
+      LOG.info("EXPECTED: " + e);
+    }
+  }
+
+  @Test
+  public void testOverwriteParam() {
+    final OverwriteParam p = new OverwriteParam(OverwriteParam.DEFAULT);
+    Assert.assertEquals(false, p.getValue());
+
+    new OverwriteParam("trUe");
+
+    try {
+      new OverwriteParam("abc");
+      Assert.fail();
+    } catch(IllegalArgumentException e) {
+      LOG.info("EXPECTED: " + e);
+    }
+  }
+
+  @Test
+  public void testOwnerParam() {
+    final OwnerParam p = new OwnerParam(OwnerParam.DEFAULT);
+    Assert.assertEquals(null, p.getValue());
+  }
+
+  @Test
+  public void testPermissionParam() {
+    final PermissionParam p = new PermissionParam(PermissionParam.DEFAULT);
+    Assert.assertEquals(new FsPermission((short)0755), p.getFsPermission());
+
PermissionParam("0"); + + try { + new PermissionParam("-1"); + Assert.fail(); + } catch(IllegalArgumentException e) { + LOG.info("EXPECTED: " + e); + } + + new PermissionParam("1777"); + + try { + new PermissionParam("2000"); + Assert.fail(); + } catch(IllegalArgumentException e) { + LOG.info("EXPECTED: " + e); + } + + try { + new PermissionParam("8"); + Assert.fail(); + } catch(IllegalArgumentException e) { + LOG.info("EXPECTED: " + e); + } + + try { + new PermissionParam("abc"); + Assert.fail(); + } catch(IllegalArgumentException e) { + LOG.info("EXPECTED: " + e); + } + } + + @Test + public void testRecursiveParam() { + final RecursiveParam p = new RecursiveParam(RecursiveParam.DEFAULT); + Assert.assertEquals(false, p.getValue()); + + new RecursiveParam("falSe"); + + try { + new RecursiveParam("abc"); + Assert.fail(); + } catch(IllegalArgumentException e) { + LOG.info("EXPECTED: " + e); + } + } + + @Test + public void testRenewerParam() { + final RenewerParam p = new RenewerParam(RenewerParam.DEFAULT); + Assert.assertEquals(null, p.getValue()); + } + + @Test + public void testReplicationParam() { + final ReplicationParam p = new ReplicationParam(ReplicationParam.DEFAULT); + Assert.assertEquals(null, p.getValue()); + Assert.assertEquals( + (short)conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY, + DFSConfigKeys.DFS_REPLICATION_DEFAULT), + p.getValue(conf)); + + new ReplicationParam((short)1); + + try { + new ReplicationParam((short)0); + Assert.fail(); + } catch(IllegalArgumentException e) { + LOG.info("EXPECTED: " + e); + } + } +}