HADOOP-7060. A more elegant FileSystem#listCorruptFileBlocks API. Contributed by Patrick Kling.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1044146 13f79535-47bb-0310-9956-ffa450edef68
Committed by Hairong Kuang on 2010-12-09 22:13:06 +00:00
parent e75e481ec1
commit 6afbdb6e41
8 changed files with 22 additions and 207 deletions

@@ -24,7 +24,10 @@ Trunk (unreleased changes)
(Patrick Kling via eli)
HADOOP-7054 Change NN LoadGenerator to use FileContext APIs
(Sanjay Radia)
+ HADOOP-7060. A more elegant FileSystem#listCorruptFileBlocks API.
+ (Patrick Kling via hairong)
OPTIMIZATIONS

@@ -835,11 +835,11 @@ public abstract FileStatus[] listStatus(final Path f)
UnresolvedLinkException, IOException;
/**
- * @return a list in which each entry describes a corrupt file/block
+ * @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
* @throws IOException
*/
- public CorruptFileBlocks listCorruptFileBlocks(String path,
-     String cookie)
+ public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
" does not support" +

@@ -1,108 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.Text;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
/**
* Contains a list of paths corresponding to corrupt files and a cookie
* used for iterative calls to NameNode.listCorruptFileBlocks.
*
*/
public class CorruptFileBlocks implements Writable {
// used for hashCode
private static final int PRIME = 16777619;
private String[] files;
private String cookie;
public CorruptFileBlocks() {
this(new String[0], "");
}
public CorruptFileBlocks(String[] files, String cookie) {
this.files = files;
this.cookie = cookie;
}
public String[] getFiles() {
return files;
}
public String getCookie() {
return cookie;
}
/**
* {@inheritDoc}
*/
@Override
public void readFields(DataInput in) throws IOException {
int fileCount = in.readInt();
files = new String[fileCount];
for (int i = 0; i < fileCount; i++) {
files[i] = Text.readString(in);
}
cookie = Text.readString(in);
}
/**
* {@inheritDoc}
*/
@Override
public void write(DataOutput out) throws IOException {
out.writeInt(files.length);
for (int i = 0; i < files.length; i++) {
Text.writeString(out, files[i]);
}
Text.writeString(out, cookie);
}
/**
* {@inheritDoc}
*/
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof CorruptFileBlocks)) {
return false;
}
CorruptFileBlocks other = (CorruptFileBlocks) obj;
return cookie.equals(other.cookie) &&
Arrays.equals(files, other.files);
}
/**
* {@inheritDoc}
*/
public int hashCode() {
int result = cookie.hashCode();
for (String file : files) {
result = PRIME * result + file.hashCode();
}
return result;
}
}

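With the class above gone from the public API, an implementation that still speaks a paged, cookie-driven protocol underneath has to hide that paging behind the new RemoteIterator<Path> contract. The following is a minimal sketch of one way to do that; it is not the actual HDFS implementation, and Page and fetchNextPage() are hypothetical stand-ins for the real protocol types.

import java.io.IOException;
import java.util.NoSuchElementException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

/**
 * Illustrative sketch only: adapts a cookie-paged corrupt-file listing
 * (the shape the removed CorruptFileBlocks class modelled) to the new
 * RemoteIterator<Path> contract.
 */
abstract class CookiePagedCorruptFileIterator implements RemoteIterator<Path> {

  /** Hypothetical page type: a batch of corrupt paths plus the resume cookie. */
  protected static class Page {
    final String[] files;
    final String cookie;
    Page(String[] files, String cookie) {
      this.files = files;
      this.cookie = cookie;
    }
  }

  /** Hypothetical hook standing in for the paged call to the server. */
  protected abstract Page fetchNextPage(String cookie) throws IOException;

  private Page current = null;
  private int pos = 0;
  private boolean done = false;

  @Override
  public boolean hasNext() throws IOException {
    // Fetch pages lazily; an empty page is taken (by assumption) to mean
    // the server has no more corrupt files to report.
    while (!done && (current == null || pos >= current.files.length)) {
      String cookie = (current == null) ? "" : current.cookie;
      current = fetchNextPage(cookie);
      pos = 0;
      if (current.files.length == 0) {
        done = true;
      }
    }
    return !done;
  }

  @Override
  public Path next() throws IOException {
    if (!hasNext()) {
      throw new NoSuchElementException("No more corrupt file paths");
    }
    return new Path(current.files[pos++]);
  }
}

Callers see only hasNext() and next(); the cookie never escapes the adapter.
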
@@ -1298,18 +1298,19 @@ public RemoteIterator<FileStatus> next(
}
/**
- * @return a list in which each entry describes a corrupt file/block
+ * @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
* @throws IOException
*/
- public CorruptFileBlocks listCorruptFileBlocks(final String path,
-     final String cookie)
+ public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
- final Path absF = fixRelativePart(new Path(path));
- return new FSLinkResolver<CorruptFileBlocks>() {
+ final Path absF = fixRelativePart(path);
+ return new FSLinkResolver<RemoteIterator<Path>>() {
@Override
- public CorruptFileBlocks next(final AbstractFileSystem fs, final Path p)
+ public RemoteIterator<Path> next(final AbstractFileSystem fs,
+     final Path p)
throws IOException, UnresolvedLinkException {
- return fs.listCorruptFileBlocks(p.toUri().getPath(), cookie);
+ return fs.listCorruptFileBlocks(p);
}
}.resolve(this, absF);
}

@@ -1091,11 +1091,11 @@ private void listStatus(ArrayList<FileStatus> results, Path f,
}
/**
- * @return a list in which each entry describes a corrupt file/block
+ * @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
* @throws IOException
*/
- public CorruptFileBlocks listCorruptFileBlocks(String path,
-     String cookie)
+ public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
" does not support" +

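For orientation, here is a minimal sketch of how a caller might use the new form of the API. It assumes the concrete FileSystem in use overrides listCorruptFileBlocks (as the hunk above shows, the FileSystem base class still throws UnsupportedOperationException); the class name and the root path are illustrative.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

// Illustrative only: walks the corrupt-file listing under "/" with the
// new RemoteIterator<Path> signature instead of the old cookie loop.
public class ListCorruptFilesSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // The iterator may report a path more than once if the file has
    // several corrupt blocks (see the updated javadoc above).
    RemoteIterator<Path> corrupt = fs.listCorruptFileBlocks(new Path("/"));
    while (corrupt.hasNext()) {
      System.out.println("corrupt: " + corrupt.next());
    }
  }
}

Compared with the removed signature, the caller no longer threads the opaque cookie through repeated calls; whatever batching the implementation does stays behind the iterator.
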
@@ -170,10 +170,9 @@ public FileStatus[] listStatus(Path f) throws IOException {
* {@inheritDoc}
*/
@Override
- public CorruptFileBlocks listCorruptFileBlocks(String path,
-     String cookie)
+ public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
- return fs.listCorruptFileBlocks(path, cookie);
+ return fs.listCorruptFileBlocks(path);
}
/** List files and its block locations in a directory. */

@@ -168,10 +168,9 @@ public FileStatus[] listStatus(Path f)
* {@inheritDoc}
*/
@Override
- public CorruptFileBlocks listCorruptFileBlocks(String path,
-     String cookie)
+ public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
- return myFs.listCorruptFileBlocks(path, cookie);
+ return myFs.listCorruptFileBlocks(path);
}
@Override

@@ -1,79 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import static org.junit.Assert.*;
import org.junit.Test;
import org.apache.hadoop.io.DataOutputBuffer;
public class TestCorruptFileBlocks {
/**
* Serialize the cfb given, deserialize and return the result.
*/
static CorruptFileBlocks serializeAndDeserialize(CorruptFileBlocks cfb)
throws IOException {
DataOutputBuffer buf = new DataOutputBuffer();
cfb.write(buf);
byte[] data = buf.getData();
DataInputStream input = new DataInputStream(new ByteArrayInputStream(data));
CorruptFileBlocks result = new CorruptFileBlocks();
result.readFields(input);
return result;
}
/**
* Check whether cfb is unchanged after serialization and deserialization.
*/
static boolean checkSerialize(CorruptFileBlocks cfb)
throws IOException {
return cfb.equals(serializeAndDeserialize(cfb));
}
/**
* Test serialization and deserialization of CorruptFileBlocks.
*/
@Test
public void testSerialization() throws IOException {
{
CorruptFileBlocks cfb = new CorruptFileBlocks();
assertTrue(checkSerialize(cfb));
}
{
String[] files = new String[0];
CorruptFileBlocks cfb = new CorruptFileBlocks(files, "");
assertTrue(checkSerialize(cfb));
}
{
String[] files = { "a", "bb", "ccc" };
CorruptFileBlocks cfb = new CorruptFileBlocks(files, "test");
assertTrue(checkSerialize(cfb));
}
}
}
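With the Writable round-trip test above removed, a comparable unit test for the new surface would exercise the iterator contract instead. A minimal sketch, using JUnit as the deleted test did; the in-memory iteratorOf helper is illustrative and not part of this patch.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Test;

public class TestCorruptFileIteration {

  /** Illustrative helper: a RemoteIterator<Path> backed by a fixed list. */
  private static RemoteIterator<Path> iteratorOf(String... files) {
    final Iterator<String> it = Arrays.asList(files).iterator();
    return new RemoteIterator<Path>() {
      @Override
      public boolean hasNext() throws IOException {
        return it.hasNext();
      }
      @Override
      public Path next() throws IOException {
        return new Path(it.next());
      }
    };
  }

  @Test
  public void testIterationOrder() throws IOException {
    RemoteIterator<Path> corrupt = iteratorOf("/a", "/b/bb", "/c/cc/ccc");
    assertEquals(new Path("/a"), corrupt.next());
    assertEquals(new Path("/b/bb"), corrupt.next());
    assertEquals(new Path("/c/cc/ccc"), corrupt.next());
    assertFalse(corrupt.hasNext());
  }
}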