HADOOP-7006. Fix 'fs -getmerge' command to not be a no-op. Contributed by Chris Nauroth.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1027748 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 2010-10-26 21:15:36 +00:00
parent aa0ca9d649
commit 0d8d3d03e4
3 changed files with 101 additions and 4 deletions
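For context: the shell's 'fs -getmerge' command concatenates every file under a source directory into a single destination file, and it is built on FileUtil.copyMerge. Before this patch the directory check inside copyMerge was inverted, so the method returned false and copied nothing whenever the source actually was a directory. The core of the fix, restated outside the diff for readability (names match the hunks below):

    // Before HADOOP-7006: bail out when the source IS a directory,
    // which turned 'fs -getmerge' into a no-op for its normal use case.
    if (srcFS.getFileStatus(srcDir).isDirectory())
      return false;

    // After: bail out only when the source is NOT a directory.
    if (!srcFS.getFileStatus(srcDir).isDirectory())
      return false;

The patch also sorts the directory listing before merging (Arrays.sort on the FileStatus array), so part files are concatenated in a consistent order.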

CHANGES.txt

@@ -275,6 +275,9 @@ Trunk (unreleased changes)
     HADOOP-6947. Kerberos relogin should set refreshKrb5Config to true.
     (Todd Lipcon via tomwhite)
 
+    HADOOP-7006. Fix 'fs -getmerge' command to not be a no-op.
+    (Chris Nauroth via cutting)
+
 Release 0.21.1 - Unreleased
 
   IMPROVEMENTS

org/apache/hadoop/fs/FileUtil.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs;
 
 import java.io.*;
+import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
@@ -279,13 +280,14 @@ public class FileUtil {
                                   Configuration conf, String addString) throws IOException {
     dstFile = checkDest(srcDir.getName(), dstFS, dstFile, false);
 
-    if (srcFS.getFileStatus(srcDir).isDirectory())
+    if (!srcFS.getFileStatus(srcDir).isDirectory())
       return false;
 
     OutputStream out = dstFS.create(dstFile);
 
     try {
       FileStatus contents[] = srcFS.listStatus(srcDir);
+      Arrays.sort(contents);
       for (int i = 0; i < contents.length; i++) {
         if (contents[i].isFile()) {
           InputStream in = srcFS.open(contents[i].getPath());
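For reference, a minimal standalone sketch of driving the repaired copyMerge against the local file system; it mirrors the helper added to TestFileUtil below, and the paths here are illustrative placeholders rather than anything from the patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;

    public class CopyMergeSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Use the local file system so the sketch is self-contained; any FileSystem works.
        FileSystem fs = FileSystem.getLocal(conf);
        Path srcDir = new Path("/tmp/fu/partitioned");  // directory holding part-r-* files
        Path dstFile = new Path("/tmp/fu/merged");      // single merged output file
        // deleteSource=false keeps the inputs; addString=null adds nothing between files.
        boolean merged = FileUtil.copyMerge(fs, srcDir, fs, dstFile, false, conf, null);
        System.out.println("copyMerge returned " + merged);
        fs.close();
      }
    }

With the fix, the call returns true and dstFile holds the source files' contents concatenated in sorted name order; before the fix it returned false without creating the destination file.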

org/apache/hadoop/fs/TestFileUtil.java

@@ -17,14 +17,18 @@
  */
 package org.apache.hadoop.fs;
 
+import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileReader;
 import java.io.IOException;
+import java.io.PrintWriter;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -32,8 +36,9 @@ import org.junit.Test;
 public class TestFileUtil {
   private static final Log LOG = LogFactory.getLog(TestFileUtil.class);
 
-  final static private File TEST_DIR = new File(System.getProperty(
-      "test.build.data", "/tmp"), "fu");
+  private static final String TEST_ROOT_DIR = System.getProperty(
+      "test.build.data", "/tmp") + "/fu";
+  private static final File TEST_DIR = new File(TEST_ROOT_DIR);
   private static String FILE = "x";
   private static String LINK = "y";
   private static String DIR = "dir";
@@ -41,9 +46,10 @@ public class TestFileUtil {
   private File tmp = new File(TEST_DIR, "tmp");
   private File dir1 = new File(del, DIR + "1");
   private File dir2 = new File(del, DIR + "2");
+  private File partitioned = new File(TEST_DIR, "partitioned");
 
   /**
-   * Creates directories del and tmp for testing.
+   * Creates multiple directories for testing.
    *
    * Contents of them are
    *   dir:tmp:
@@ -54,12 +60,17 @@ public class TestFileUtil {
    *   dir: dir2 : file:x
    *   link: y to tmp/x
    *   link: tmpDir to tmp
+   *   dir:partitioned:
+   *     file: part-r-00000, contents: "foo"
+   *     file: part-r-00001, contents: "bar"
    */
   private void setupDirs() throws IOException {
     Assert.assertFalse(del.exists());
     Assert.assertFalse(tmp.exists());
+    Assert.assertFalse(partitioned.exists());
     del.mkdirs();
     tmp.mkdirs();
+    partitioned.mkdirs();
     new File(del, FILE).createNewFile();
     File tmpFile = new File(tmp, FILE);
     tmpFile.createNewFile();
@@ -78,12 +89,39 @@ public class TestFileUtil {
     File linkDir = new File(del, "tmpDir");
     FileUtil.symLink(tmp.toString(), linkDir.toString());
     Assert.assertEquals(5, del.listFiles().length);
+
+    // create files in partitioned directories
+    createFile(partitioned, "part-r-00000", "foo");
+    createFile(partitioned, "part-r-00001", "bar");
+  }
+
+  /**
+   * Creates a new file in the specified directory, with the specified name and
+   * the specified file contents. This method will add a newline terminator to
+   * the end of the contents string in the destination file.
+   * @param directory File non-null destination directory.
+   * @param name String non-null file name.
+   * @param contents String non-null file contents.
+   * @throws IOException if an I/O error occurs.
+   */
+  private void createFile(File directory, String name, String contents)
+      throws IOException {
+    File newFile = new File(directory, name);
+    PrintWriter pw = new PrintWriter(newFile);
+
+    try {
+      pw.println(contents);
+    }
+    finally {
+      pw.close();
+    }
   }
 
   @After
   public void tearDown() throws IOException {
     FileUtil.fullyDelete(del);
     FileUtil.fullyDelete(tmp);
+    FileUtil.fullyDelete(partitioned);
   }
 
   @Test
@@ -312,4 +350,58 @@ public class TestFileUtil {
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
     validateAndSetWritablePermissions(ret);
   }
+
+  @Test
+  public void testCopyMergeSingleDirectory() throws IOException {
+    setupDirs();
+    boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
+    Assert.assertTrue("Expected successful copyMerge result.", copyMergeResult);
+    File merged = new File(TEST_DIR, "tmp/merged");
+    Assert.assertTrue("File tmp/merged must exist after copyMerge.",
+        merged.exists());
+    BufferedReader rdr = new BufferedReader(new FileReader(merged));
+
+    try {
+      Assert.assertEquals("Line 1 of merged file must contain \"foo\".",
+          "foo", rdr.readLine());
+      Assert.assertEquals("Line 2 of merged file must contain \"bar\".",
+          "bar", rdr.readLine());
+      Assert.assertNull("Expected end of file reading merged file.",
+          rdr.readLine());
+    }
+    finally {
+      rdr.close();
+    }
+  }
+
+  /**
+   * Calls FileUtil.copyMerge using the specified source and destination paths.
+   * Both source and destination are assumed to be on the local file system.
+   * The call will not delete source on completion and will not add an
+   * additional string between files.
+   * @param src String non-null source path.
+   * @param dst String non-null destination path.
+   * @return boolean true if the call to FileUtil.copyMerge was successful.
+   * @throws IOException if an I/O error occurs.
+   */
+  private boolean copyMerge(String src, String dst)
+      throws IOException {
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.getLocal(conf);
+    final boolean result;
+
+    try {
+      Path srcPath = new Path(TEST_ROOT_DIR, src);
+      Path dstPath = new Path(TEST_ROOT_DIR, dst);
+      boolean deleteSource = false;
+      String addString = null;
+      result = FileUtil.copyMerge(fs, srcPath, fs, dstPath, deleteSource, conf,
+          addString);
+    }
+    finally {
+      fs.close();
+    }
+
+    return result;
+  }
 }