HADOOP-6318. Upgrade to Avro 1.2.0.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@829279 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Doug Cutting 2009-10-23 23:36:29 +00:00
parent d7a7f02c3e
commit a13a0f3fba
6 changed files with 9 additions and 46 deletions

View File

@@ -5,7 +5,7 @@
<classpathentry kind="src" path="src/test/core"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="var" path="ANT_HOME/lib/ant.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/avro-1.1.0.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/avro-1.2.0.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-cli-1.2.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-codec-1.3.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-el-1.0.jar"/>

View File

@@ -30,6 +30,8 @@ Trunk (unreleased changes)
HADOOP-6326. Hudson runs should check for AspectJ warnings and report
failure if any is present (cos)
HADOOP-6318. Upgrade to Avro 1.2.0. (cutting)
OPTIMIZATIONS
BUG FIXES

View File

@@ -16,7 +16,7 @@
#These are the versions of our dependencies (in alphabetical order)
apacheant.version=1.7.0
avro.version=1.1.0
avro.version=1.2.0
checkstyle.version=4.2

View File

@@ -78,14 +78,7 @@ private void getPackages() {
@Override
protected DatumReader getReader(Map<String, String> metadata) {
try {
Class<SpecificRecord> clazz = (Class<SpecificRecord>)
getClassFromMetadata(metadata);
String prefix =
((clazz.getEnclosingClass() == null
|| "null".equals(clazz.getEnclosingClass().getName())) ?
clazz.getPackage().getName() + "."
: (clazz.getEnclosingClass().getName() + "$"));
return new ReflectDatumReader(ReflectData.get().getSchema(clazz), prefix);
return new ReflectDatumReader(getClassFromMetadata(metadata));
} catch (Exception e) {
throw new RuntimeException(e);
}

View File

@@ -48,9 +48,7 @@ public boolean accept(Map<String, String> metadata) {
@Override
protected DatumReader getReader(Map<String, String> metadata) {
try {
Class<SpecificRecord> clazz = (Class<SpecificRecord>)
getClassFromMetadata(metadata);
return new SpecificDatumReader(clazz.newInstance().getSchema());
return new SpecificDatumReader(getClassFromMetadata(metadata));
} catch (Exception e) {
throw new RuntimeException(e);
}

View File

@@ -114,13 +114,6 @@ public void writeBuffers(List<ByteBuffer> buffers) throws IOException {
public void close() throws IOException {}
}
/** Client-side Avro requestor; a thin pass-through to ReflectRequestor.
 * NOTE(review): per the hunk header above (13 old lines -> 6 new), this
 * class is being DELETED by this commit — with Avro 1.2.0 the code below
 * constructs ReflectRequestor directly from the protocol interface. */
private static class Invoker extends ReflectRequestor {
// No added state or behavior; only forwards to the superclass constructor.
public Invoker(Protocol protocol, Transceiver transceiver)
throws IOException {
super(protocol, transceiver);
}
}
/** Construct a client-side proxy object that implements the named protocol,
* talking to a server at the named address. */
public static Object getProxy(Class<?> protocol,
@@ -150,36 +143,14 @@ public static Object getProxy(Class<?> protocol,
new InvocationHandler() {
public Object invoke(Object proxy, Method method, Object[] args)
throws Throwable {
return new Invoker
(ReflectData.get().getProtocol(protocol),
return new ReflectRequestor
(protocol,
new ClientTransceiver(addr, ticket, conf, factory))
.invoke(proxy, method, args);
}
});
}
/** An Avro RPC Transceiver that provides a request passed through Hadoop RPC
* to the Avro RPC Responder for processing. */
private static class ServerTransceiver extends Transceiver {
// Serialized Avro request frames that arrived via Hadoop RPC.
List<ByteBuffer> request;
public ServerTransceiver(List<ByteBuffer> request) {
this.request = request;
}
// Fixed placeholder name; the real peer address is handled by Hadoop RPC.
public String getRemoteName() { return "remote"; }
// Hands the buffered request frames to the Avro responder.
public List<ByteBuffer> readBuffers() throws IOException {
return request;
}
// The write path is unused: responses travel back through Hadoop RPC
// (see TunnelResponder.call below), so writing here is an error.
public void writeBuffers(List<ByteBuffer> buffers) throws IOException {
throw new UnsupportedOperationException();
}
// Nothing to release; the buffers are owned by the caller.
public void close() throws IOException {}
}
/** An Avro RPC Responder that can process requests passed via Hadoop RPC. */
private static class TunnelResponder extends ReflectResponder
implements TunnelProtocol {
@@ -195,8 +166,7 @@ public long getProtocolVersion(String protocol, long version)
public BufferListWritable call(final BufferListWritable request)
throws IOException {
return new BufferListWritable
(respond(new ServerTransceiver(request.buffers)));
return new BufferListWritable(respond(request.buffers));
}
}