cd7157784e
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1161332 13f79535-47bb-0310-9956-ffa450edef68
<?xml version="1.0" encoding="iso-8859-1" standalone="no"?>
<!-- Generated by the JDiff Javadoc doclet -->
<!-- (http://www.jdiff.org) -->
<!-- on Fri Feb 20 00:10:24 UTC 2009 -->
<api
  xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'
  xsi:noNamespaceSchemaLocation='api.xsd'
  name="hadoop 0.19.1"
  jdversion="1.1.1">
<!-- Command line arguments = -doclet jdiff.JDiff -docletpath /home/hadoopqa/tools/jdiff/latest/jdiff.jar:/home/hadoopqa/tools/jdiff/latest/xerces.jar -classpath /home/ndaley/hadoop/branch-0.19/build/classes:/home/ndaley/hadoop/branch-0.19/lib/commons-cli-2.0-SNAPSHOT.jar:/home/ndaley/hadoop/branch-0.19/lib/commons-codec-1.3.jar:/home/ndaley/hadoop/branch-0.19/lib/commons-httpclient-3.0.1.jar:/home/ndaley/hadoop/branch-0.19/lib/commons-logging-1.0.4.jar:/home/ndaley/hadoop/branch-0.19/lib/commons-logging-api-1.0.4.jar:/home/ndaley/hadoop/branch-0.19/lib/commons-net-1.4.1.jar:/home/ndaley/hadoop/branch-0.19/lib/hsqldb-1.8.0.10.jar:/home/ndaley/hadoop/branch-0.19/lib/jets3t-0.6.1.jar:/home/ndaley/hadoop/branch-0.19/lib/jetty-5.1.4.jar:/home/ndaley/hadoop/branch-0.19/lib/jetty-ext/commons-el.jar:/home/ndaley/hadoop/branch-0.19/lib/jetty-ext/jasper-compiler.jar:/home/ndaley/hadoop/branch-0.19/lib/jetty-ext/jasper-runtime.jar:/home/ndaley/hadoop/branch-0.19/lib/jetty-ext/jsp-api.jar:/home/ndaley/hadoop/branch-0.19/lib/junit-3.8.1.jar:/home/ndaley/hadoop/branch-0.19/lib/kfs-0.2.0.jar:/home/ndaley/hadoop/branch-0.19/lib/log4j-1.2.15.jar:/home/ndaley/hadoop/branch-0.19/lib/oro-2.0.8.jar:/home/ndaley/hadoop/branch-0.19/lib/servlet-api.jar:/home/ndaley/hadoop/branch-0.19/lib/slf4j-api-1.4.3.jar:/home/ndaley/hadoop/branch-0.19/lib/slf4j-log4j12-1.4.3.jar:/home/ndaley/hadoop/branch-0.19/lib/xmlenc-0.52.jar:/home/ndaley/hadoop/branch-0.19/conf:/home/ndaley/tools/ant/latest/lib/ant-launcher.jar:/home/ndaley/tools/ant/latest/lib/ant-antlr.jar:/home/ndaley/tools/ant/latest/lib/ant-apache-bcel.jar:/home/ndaley/tools/ant/latest/lib/ant-apache-bsf.jar:/home/ndaley/tools/ant/latest/lib/ant-apache-log4j.jar:/home/ndaley/tools/ant/latest/lib/ant-apache-oro.jar:/home/ndaley/tools/ant/latest/lib/ant-apache-regexp.jar:/home/ndaley/tools/ant/latest/lib/ant-apache-resolver.jar:/home/ndaley/tools/ant/latest/lib/ant-commons-logging.jar:/home/ndaley/tools/ant/latest/lib/ant-commons-net.jar:/home/ndaley/tools/ant/latest/lib/ant-jai.jar:/home/ndaley/tools/ant/latest/lib/ant-javamail.jar:/home/ndaley/tools/ant/latest/lib/ant-jdepend.jar:/home/ndaley/tools/ant/latest/lib/ant-jmf.jar:/home/ndaley/tools/ant/latest/lib/ant-jsch.jar:/home/ndaley/tools/ant/latest/lib/ant-junit.jar:/home/ndaley/tools/ant/latest/lib/ant-netrexx.jar:/home/ndaley/tools/ant/latest/lib/ant-nodeps.jar:/home/ndaley/tools/ant/latest/lib/ant-starteam.jar:/home/ndaley/tools/ant/latest/lib/ant-stylebook.jar:/home/ndaley/tools/ant/latest/lib/ant-swing.jar:/home/ndaley/tools/ant/latest/lib/ant-testutil.jar:/home/ndaley/tools/ant/latest/lib/ant-trax.jar:/home/ndaley/tools/ant/latest/lib/ant-weblogic.jar:/home/ndaley/tools/ant/latest/lib/ant.jar:/home/ndaley/tools/ant/latest/lib/xercesImpl.jar:/home/ndaley/tools/ant/latest/lib/xml-apis.jar:/home/hadoopqa/tools/java/jdk1.6.0_07-32bit/lib/tools.jar -sourcepath /home/ndaley/hadoop/branch-0.19/src/core:/home/ndaley/hadoop/branch-0.19/src/mapred:/home/ndaley/hadoop/branch-0.19/src/tools -apidir /home/ndaley/hadoop/branch-0.19/docs/jdiff -apiname hadoop 0.19.1 -->
|
|
<package name="org.apache.hadoop">
|
|
<!-- start class org.apache.hadoop.HadoopVersionAnnotation -->
|
|
<class name="HadoopVersionAnnotation" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.annotation.Annotation"/>
|
|
<doc>
|
|
<![CDATA[A package attribute that captures the version of Hadoop that was compiled.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.HadoopVersionAnnotation -->
|
|
</package>
|
|
<package name="org.apache.hadoop.conf">
|
|
<!-- start interface org.apache.hadoop.conf.Configurable -->
|
|
<interface name="Configurable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Set the configuration to be used by this object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the configuration used by this object.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Something that may be configured with a {@link Configuration}.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.conf.Configurable -->
|
|
<!-- start class org.apache.hadoop.conf.Configuration -->
|
|
<class name="Configuration" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Iterable<java.util.Map.Entry<java.lang.String, java.lang.String>>"/>
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A new configuration.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Configuration" type="boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A new configuration where the behavior of reading from the default
|
|
resources can be turned off.
|
|
|
|
If the parameter {@code loadDefaults} is false, the new instance
|
|
will not load resources from the default files.
|
|
@param loadDefaults specifies whether to load from the default files]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Configuration" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A new configuration with the same settings cloned from another.
|
|
|
|
@param other the configuration from which to clone settings.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addResource"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Add a configuration resource.
|
|
|
|
The properties of this resource will override properties of previously
|
|
added resources, unless they were marked <a href="#Final">final</a>.
|
|
|
|
@param name resource to be added, the classpath is examined for a file
|
|
with that name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addResource"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="url" type="java.net.URL"/>
|
|
<doc>
|
|
<![CDATA[Add a configuration resource.
|
|
|
|
The properties of this resource will override properties of previously
|
|
added resources, unless they were marked <a href="#Final">final</a>.
|
|
|
|
@param url url of the resource to be added, the local filesystem is
|
|
examined directly to find the resource, without referring to
|
|
the classpath.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addResource"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Add a configuration resource.
|
|
|
|
The properties of this resource will override properties of previously
|
|
added resources, unless they were marked <a href="#Final">final</a>.
|
|
|
|
@param file file-path of resource to be added, the local filesystem is
|
|
examined directly to find the resource, without referring to
|
|
the classpath.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addResource"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<doc>
|
|
<![CDATA[Add a configuration resource.
|
|
|
|
The properties of this resource will override properties of previously
|
|
added resources, unless they were marked <a href="#Final">final</a>.
|
|
|
|
@param in InputStream to deserialize the object from.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reloadConfiguration"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Reload configuration from previously added resources.
|
|
|
|
This method will clear all the configuration read from the added
|
|
resources, and final parameters. This will make the resources to
|
|
be read again before accessing the values. Values that are added
|
|
via set methods will overlay values read from the resources.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property, <code>null</code> if
|
|
no such property exists.
|
|
|
|
Values are processed for <a href="#VariableExpansion">variable expansion</a>
|
|
before being returned.
|
|
|
|
@param name the property name.
|
|
@return the value of the <code>name</code> property,
|
|
or null if no such property exists.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRaw" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property, without doing
|
|
<a href="#VariableExpansion">variable expansion</a>.
|
|
|
|
@param name the property name.
|
|
@return the value of the <code>name</code> property,
|
|
or null if no such property exists.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="value" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the <code>value</code> of the <code>name</code> property.
|
|
|
|
@param name property name.
|
|
@param value property value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property. If no such property
|
|
exists, then <code>defaultValue</code> is returned.
|
|
|
|
@param name property name.
|
|
@param defaultValue default value.
|
|
@return property value, or <code>defaultValue</code> if the property
|
|
doesn't exist.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property as an <code>int</code>.
|
|
|
|
If no such property exists, or if the specified value is not a valid
|
|
<code>int</code>, then <code>defaultValue</code> is returned.
|
|
|
|
@param name property name.
|
|
@param defaultValue default value.
|
|
@return property value as an <code>int</code>,
|
|
or <code>defaultValue</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInt"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="value" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the value of the <code>name</code> property to an <code>int</code>.
|
|
|
|
@param name property name.
|
|
@param value <code>int</code> value of the property.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property as a <code>long</code>.
|
|
If no such property is specified, or if the specified value is not a valid
|
|
<code>long</code>, then <code>defaultValue</code> is returned.
|
|
|
|
@param name property name.
|
|
@param defaultValue default value.
|
|
@return property value as a <code>long</code>,
|
|
or <code>defaultValue</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setLong"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="value" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the value of the <code>name</code> property to a <code>long</code>.
|
|
|
|
@param name property name.
|
|
@param value <code>long</code> value of the property.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFloat" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="float"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property as a <code>float</code>.
|
|
If no such property is specified, or if the specified value is not a valid
|
|
<code>float</code>, then <code>defaultValue</code> is returned.
|
|
|
|
@param name property name.
|
|
@param defaultValue default value.
|
|
@return property value as a <code>float</code>,
|
|
or <code>defaultValue</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBoolean" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property as a <code>boolean</code>.
|
|
If no such property is specified, or if the specified value is not a valid
|
|
<code>boolean</code>, then <code>defaultValue</code> is returned.
|
|
|
|
@param name property name.
|
|
@param defaultValue default value.
|
|
@return property value as a <code>boolean</code>,
|
|
or <code>defaultValue</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setBoolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="value" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set the value of the <code>name</code> property to a <code>boolean</code>.
|
|
|
|
@param name property name.
|
|
@param value <code>boolean</code> value of the property.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRange" return="org.apache.hadoop.conf.Configuration.IntegerRanges"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Parse the given attribute as a set of integer ranges
|
|
@param name the attribute name
|
|
@param defaultValue the default value if it is not set
|
|
@return a new set of ranges from the configured value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStringCollection" return="java.util.Collection<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the comma delimited values of the <code>name</code> property as
|
|
a collection of <code>String</code>s.
|
|
If no such property is specified, then an empty collection is returned.
|
|
<p>
|
|
This is an optimized version of {@link #getStrings(String)}
|
|
|
|
@param name property name.
|
|
@return property value as a collection of <code>String</code>s.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStrings" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the comma delimited values of the <code>name</code> property as
|
|
an array of <code>String</code>s.
|
|
If no such property is specified then <code>null</code> is returned.
|
|
|
|
@param name property name.
|
|
@return property value as an array of <code>String</code>s,
|
|
or <code>null</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStrings" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Get the comma delimited values of the <code>name</code> property as
|
|
an array of <code>String</code>s.
|
|
If no such property is specified, then the default value is returned.
|
|
|
|
@param name property name.
|
|
@param defaultValue The default value
|
|
@return property value as an array of <code>String</code>s,
|
|
or default value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setStrings"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="values" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Set the array of string values for the <code>name</code> property as
|
|
comma delimited values.
|
|
|
|
@param name property name.
|
|
@param values The values]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClassByName" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<exception name="ClassNotFoundException" type="java.lang.ClassNotFoundException"/>
|
|
<doc>
|
|
<![CDATA[Load a class by name.
|
|
|
|
@param name the class name.
|
|
@return the class object.
|
|
@throws ClassNotFoundException if the class is not found.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClasses" return="java.lang.Class[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="java.lang.Class[]"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property
|
|
as an array of <code>Class</code>.
|
|
The value of the property specifies a list of comma separated class names.
|
|
If no such property is specified, then <code>defaultValue</code> is
|
|
returned.
|
|
|
|
@param name the property name.
|
|
@param defaultValue default value.
|
|
@return property value as a <code>Class[]</code>,
|
|
or <code>defaultValue</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property as a <code>Class</code>.
|
|
If no such property is specified, then <code>defaultValue</code> is
|
|
returned.
|
|
|
|
@param name the class name.
|
|
@param defaultValue default value.
|
|
@return property value as a <code>Class</code>,
|
|
or <code>defaultValue</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClass" return="java.lang.Class<? extends U>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="defaultValue" type="java.lang.Class<? extends U>"/>
|
|
<param name="xface" type="java.lang.Class<U>"/>
|
|
<doc>
|
|
<![CDATA[Get the value of the <code>name</code> property as a <code>Class</code>
|
|
implementing the interface specified by <code>xface</code>.
|
|
|
|
If no such property is specified, then <code>defaultValue</code> is
|
|
returned.
|
|
|
|
An exception is thrown if the returned class does not implement the named
|
|
interface.
|
|
|
|
@param name the class name.
|
|
@param defaultValue default value.
|
|
@param xface the interface implemented by the named class.
|
|
@return property value as a <code>Class</code>,
|
|
or <code>defaultValue</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="theClass" type="java.lang.Class<?>"/>
|
|
<param name="xface" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Set the value of the <code>name</code> property to the name of
|
|
<code>theClass</code> implementing the given interface <code>xface</code>.
|
|
|
|
An exception is thrown if <code>theClass</code> does not implement the
|
|
interface <code>xface</code>.
|
|
|
|
@param name property name.
|
|
@param theClass property value.
|
|
@param xface the interface implemented by the named class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dirsProp" type="java.lang.String"/>
|
|
<param name="path" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get a local file under a directory named by <i>dirsProp</i> with
|
|
the given <i>path</i>. If <i>dirsProp</i> contains multiple directories,
|
|
then one is chosen based on <i>path</i>'s hash code. If the selected
|
|
directory does not exist, an attempt is made to create it.
|
|
|
|
@param dirsProp directory in which to locate the file.
|
|
@param path file-path.
|
|
@return local file under the directory with the given path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFile" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dirsProp" type="java.lang.String"/>
|
|
<param name="path" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get a local file name under a directory named in <i>dirsProp</i> with
|
|
the given <i>path</i>. If <i>dirsProp</i> contains multiple directories,
|
|
then one is chosen based on <i>path</i>'s hash code. If the selected
|
|
directory does not exist, an attempt is made to create it.
|
|
|
|
@param dirsProp directory in which to locate the file.
|
|
@param path file-path.
|
|
@return local file under the directory with the given path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getResource" return="java.net.URL"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link URL} for the named resource.
|
|
|
|
@param name resource name.
|
|
@return the url for the named resource.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConfResourceAsInputStream" return="java.io.InputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get an input stream attached to the configuration resource with the
|
|
given <code>name</code>.
|
|
|
|
@param name configuration resource name.
|
|
@return an input stream attached to the resource.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConfResourceAsReader" return="java.io.Reader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get a {@link Reader} attached to the configuration resource with the
|
|
given <code>name</code>.
|
|
|
|
@param name configuration resource name.
|
|
@return a reader attached to the resource.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="size" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the number of keys in the configuration.
|
|
|
|
@return number of keys in the configuration.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Clears all keys from the configuration.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="iterator" return="java.util.Iterator<java.util.Map.Entry<java.lang.String, java.lang.String>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get an {@link Iterator} to go through the list of <code>String</code>
|
|
key-value pairs in the configuration.
|
|
|
|
@return an iterator over the entries.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeXml"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write out the non-default properties in this configuration to the given
|
|
{@link OutputStream}.
|
|
|
|
@param out the output stream to write to.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClassLoader" return="java.lang.ClassLoader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link ClassLoader} for this job.
|
|
|
|
@return the correct class loader.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setClassLoader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="classLoader" type="java.lang.ClassLoader"/>
|
|
<doc>
|
|
<![CDATA[Set the class loader that will be used to load the various objects.
|
|
|
|
@param classLoader the new class loader.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setQuietMode"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="quietmode" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set the quiet-mode.
|
|
|
|
In quiet-mode, error and informational messages might not be logged.
|
|
|
|
@param quietmode <code>true</code> to set quiet-mode on, <code>false</code>
|
|
to turn it off.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[For debugging. List non-default properties to the terminal and exit.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Provides access to configuration parameters.
|
|
|
|
<h4 id="Resources">Resources</h4>
|
|
|
|
<p>Configurations are specified by resources. A resource contains a set of
|
|
name/value pairs as XML data. Each resource is named by either a
|
|
<code>String</code> or by a {@link Path}. If named by a <code>String</code>,
|
|
then the classpath is examined for a file with that name. If named by a
|
|
<code>Path</code>, then the local filesystem is examined directly, without
|
|
referring to the classpath.
|
|
|
|
<p>Unless explicitly turned off, Hadoop by default specifies two
|
|
resources, loaded in-order from the classpath: <ol>
|
|
<li><tt><a href="{@docRoot}/../hadoop-default.html">hadoop-default.xml</a>
|
|
</tt>: Read-only defaults for hadoop.</li>
|
|
<li><tt>hadoop-site.xml</tt>: Site-specific configuration for a given hadoop
|
|
installation.</li>
|
|
</ol>
|
|
Applications may add additional resources, which are loaded
|
|
subsequent to these resources in the order they are added.
|
|
|
|
<h4 id="FinalParams">Final Parameters</h4>
|
|
|
|
<p>Configuration parameters may be declared <i>final</i>.
|
|
Once a resource declares a value final, no subsequently-loaded
|
|
resource can alter that value.
|
|
For example, one might define a final parameter with:
|
|
<tt><pre>
|
|
<property>
|
|
<name>dfs.client.buffer.dir</name>
|
|
<value>/tmp/hadoop/dfs/client</value>
|
|
<b><final>true</final></b>
|
|
</property></pre></tt>
|
|
|
|
Administrators typically define parameters as final in
|
|
<tt>hadoop-site.xml</tt> for values that user applications may not alter.
|
|
|
|
<h4 id="VariableExpansion">Variable Expansion</h4>
|
|
|
|
<p>Value strings are first processed for <i>variable expansion</i>. The
|
|
available properties are:<ol>
|
|
<li>Other properties defined in this Configuration; and, if a name is
|
|
undefined here,</li>
|
|
<li>Properties in {@link System#getProperties()}.</li>
|
|
</ol>
|
|
|
|
<p>For example, if a configuration resource contains the following property
|
|
definitions:
|
|
<tt><pre>
|
|
<property>
|
|
<name>basedir</name>
|
|
<value>/user/${<i>user.name</i>}</value>
|
|
</property>
|
|
|
|
<property>
|
|
<name>tempdir</name>
|
|
<value>${<i>basedir</i>}/tmp</value>
|
|
</property></pre></tt>
|
|
|
|
When <tt>conf.get("tempdir")</tt> is called, then <tt>${<i>basedir</i>}</tt>
|
|
will be resolved to another property in this Configuration, while
|
|
<tt>${<i>user.name</i>}</tt> would then ordinarily be resolved to the value
|
|
of the System property with that name.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.conf.Configuration -->
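<!-- Illustrative note (not part of the generated JDiff output): a minimal Java sketch of how the
     Configuration API described above is typically used. The resource name "my-app.xml" and the
     property "my.app.buffer.size" are hypothetical; "tempdir" refers to the variable-expansion
     example in the class documentation.

     Configuration conf = new Configuration();                  // loads hadoop-default.xml and hadoop-site.xml
     conf.addResource("my-app.xml");                            // later resources override earlier, non-final values
     int bufferSize = conf.getInt("my.app.buffer.size", 4096);  // falls back to the default when unset or invalid
     String tempDir = conf.get("tempdir");                      // value is returned after variable expansion
-->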
|
|
<!-- start class org.apache.hadoop.conf.Configuration.IntegerRanges -->
|
|
<class name="Configuration.IntegerRanges" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Configuration.IntegerRanges"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Configuration.IntegerRanges" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="isIncluded" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="int"/>
|
|
<doc>
|
|
<![CDATA[Is the given value in the set of ranges?
|
|
@param value the value to check
|
|
@return is the value in the ranges?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A class that represents a set of positive integer ranges. It parses
|
|
strings of the form: "2-3,5,7-" where ranges are separated by comma and
|
|
the lower/upper bounds are separated by dash. Either the lower or upper
|
|
bound may be omitted meaning all values up to or over. So the string
|
|
above means 2, 3, 5, and 7, 8, 9, ...]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.conf.Configuration.IntegerRanges -->
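<!-- Illustrative note (not part of the generated JDiff output): a minimal Java sketch of the range
     string format accepted by Configuration.IntegerRanges, using the example string from the class
     documentation.

     Configuration.IntegerRanges ranges = new Configuration.IntegerRanges("2-3,5,7-");
     boolean a = ranges.isIncluded(3);   // true: 3 falls in "2-3"
     boolean b = ranges.isIncluded(6);   // false: 6 is in no range
     boolean c = ranges.isIncluded(9);   // true: "7-" has no upper bound
-->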
|
|
<!-- start class org.apache.hadoop.conf.Configured -->
|
|
<class name="Configured" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="Configured"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a Configured.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Configured" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a Configured.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Base class for things that may be configured with a {@link Configuration}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.conf.Configured -->
|
|
</package>
|
|
<package name="org.apache.hadoop.filecache">
|
|
<!-- start class org.apache.hadoop.filecache.DistributedCache -->
|
|
<class name="DistributedCache" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DistributedCache"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getLocalCache" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cache" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="baseDir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="fileStatus" type="org.apache.hadoop.fs.FileStatus"/>
|
|
<param name="isArchive" type="boolean"/>
|
|
<param name="confFileStamp" type="long"/>
|
|
<param name="currentWorkDir" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the locally cached file or archive; it could either be
|
|
previously cached (and valid) or copy it from the {@link FileSystem} now.
|
|
|
|
@param cache the cache to be localized, this should be specified as
|
|
new URI(hdfs://hostname:port/absolute_path_to_file#LINKNAME). If no scheme
|
|
or hostname:port is provided the file is assumed to be in the filesystem
|
|
being used in the Configuration
|
|
@param conf The Configuration which contains the filesystem
|
|
@param baseDir The base cache directory where you want to localize the files/archives
|
|
@param fileStatus The file status on the dfs.
|
|
@param isArchive if the cache is an archive or a file. In case it is an
|
|
archive with a .zip or .jar or .tar or .tgz or .tar.gz extension it will
|
|
be unzipped/unjarred/untarred automatically
|
|
and the directory where the archive is unzipped/unjarred/untarred is
|
|
returned as the Path.
|
|
In case of a file, the path to the file is returned
|
|
@param confFileStamp this is the hdfs file modification timestamp to verify that the
|
|
file to be cached hasn't changed since the job started
|
|
@param currentWorkDir this is the directory where you would want to create symlinks
|
|
for the locally cached files/archives
|
|
@return the path to directory where the archives are unjarred in case of archives,
|
|
the path to the file where the file is copied locally
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalCache" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cache" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="baseDir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="fileStatus" type="org.apache.hadoop.fs.FileStatus"/>
|
|
<param name="isArchive" type="boolean"/>
|
|
<param name="confFileStamp" type="long"/>
|
|
<param name="currentWorkDir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="honorSymLinkConf" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the locally cached file or archive; it could either be
|
|
previously cached (and valid) or copy it from the {@link FileSystem} now.
|
|
|
|
@param cache the cache to be localized, this should be specified as
|
|
new URI(hdfs://hostname:port/absolute_path_to_file#LINKNAME). If no scheme
|
|
or hostname:port is provided the file is assumed to be in the filesystem
|
|
being used in the Configuration
|
|
@param conf The Configuration which contains the filesystem
|
|
@param baseDir The base cache directory where you want to localize the files/archives
|
|
@param fileStatus The file status on the dfs.
|
|
@param isArchive if the cache is an archive or a file. In case it is an
|
|
archive with a .zip or .jar or .tar or .tgz or .tar.gz extension it will
|
|
be unzipped/unjarred/untarred automatically
|
|
and the directory where the archive is unzipped/unjarred/untarred is
|
|
returned as the Path.
|
|
In case of a file, the path to the file is returned
|
|
@param confFileStamp this is the hdfs file modification timestamp to verify that the
|
|
file to be cached hasn't changed since the job started
|
|
@param currentWorkDir this is the directory where you would want to create symlinks
|
|
for the locally cached files/archives
|
|
@param honorSymLinkConf if this is false, then the symlinks are not
|
|
created even if conf says so (this is required for an optimization in task
|
|
launches)
|
|
@return the path to directory where the archives are unjarred in case of archives,
|
|
the path to the file where the file is copied locally
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalCache" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cache" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="baseDir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="isArchive" type="boolean"/>
|
|
<param name="confFileStamp" type="long"/>
|
|
<param name="currentWorkDir" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the locally cached file or archive; it could either be
|
|
previously cached (and valid) or copy it from the {@link FileSystem} now.
|
|
|
|
@param cache the cache to be localized, this should be specified as
|
|
new URI(hdfs://hostname:port/absolute_path_to_file#LINKNAME). If no scheme
|
|
or hostname:port is provided the file is assumed to be in the filesystem
|
|
being used in the Configuration
|
|
@param conf The Configuration which contains the filesystem
|
|
@param baseDir The base cache directory where you want to localize the files/archives
|
|
@param isArchive if the cache is an archive or a file. In case it is an
|
|
archive with a .zip or .jar or .tar or .tgz or .tar.gz extension it will
|
|
be unzipped/unjarred/untarred automatically
|
|
and the directory where the archive is unzipped/unjarred/untarred
|
|
is returned as the Path.
|
|
In case of a file, the path to the file is returned
|
|
@param confFileStamp this is the hdfs file modification timestamp to verify that the
|
|
file to be cached hasn't changed since the job started
|
|
@param currentWorkDir this is the directory where you would want to create symlinks
|
|
for the locally cached files/archives
|
|
@return the path to directory where the archives are unjarred in case of archives,
|
|
the path to the file where the file is copied locally
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="releaseCache"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cache" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This is the opposite of getLocalCache. When you are done with
|
|
using the cache, you need to release the cache
|
|
@param cache The cache URI to be released
|
|
@param conf configuration which contains the filesystem the cache
|
|
is contained in.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeRelative" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cache" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getTimestamp" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="cache" type="java.net.URI"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns mtime of a given cache file on hdfs.
|
|
@param conf configuration
|
|
@param cache cache file
|
|
@return mtime of a given cache file on hdfs
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createAllSymlink"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="jobCacheDir" type="java.io.File"/>
|
|
<param name="workDir" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This method creates symlinks for all files in a given dir in another directory
|
|
@param conf the configuration
|
|
@param jobCacheDir the target directory for creating symlinks
|
|
@param workDir the directory in which the symlinks are created
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCacheArchives"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="archives" type="java.net.URI[]"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Set the configuration with the given set of archives
|
|
@param archives The list of archives that need to be localized
|
|
@param conf Configuration which will be changed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCacheFiles"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="files" type="java.net.URI[]"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Set the configuration with the given set of files
|
|
@param files The list of files that need to be localized
|
|
@param conf Configuration which will be changed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCacheArchives" return="java.net.URI[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get cache archives set in the Configuration
|
|
@param conf The configuration which contains the archives
|
|
@return A URI array of the caches set in the Configuration
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCacheFiles" return="java.net.URI[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get cache files set in the Configuration
|
|
@param conf The configuration which contains the files
|
|
@return A URI array of the files set in the Configuration
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalCacheArchives" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the path array of the localized caches
|
|
@param conf Configuration that contains the localized archives
|
|
@return A path array of localized caches
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalCacheFiles" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the path array of the localized files
|
|
@param conf Configuration that contains the localized files
|
|
@return A path array of localized files
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getArchiveTimestamps" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the timestamps of the archives
|
|
@param conf The configuration which stored the timestamps
|
|
@return a string array of timestamps
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileTimestamps" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the timestamps of the files
|
|
@param conf The configuration which stored the timestamps
|
|
@return a string array of timestamps
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setArchiveTimestamps"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="timestamps" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[This is to set the timestamps of the archives to be localized
|
|
@param conf Configuration which stores the timestamps
|
|
@param timestamps comma separated list of timestamps of archives.
|
|
The order should be the same as the order in which the archives are added.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setFileTimestamps"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="timestamps" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[This is to set the timestamps of the files to be localized
|
|
@param conf Configuration which stores the timestamps
|
|
@param timestamps comma separated list of timestamps of files.
|
|
The order should be the same as the order in which the files are added.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setLocalArchives"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the conf to contain the location for localized archives
|
|
@param conf The conf to modify to contain the localized caches
|
|
@param str a comma separated list of local archives]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setLocalFiles"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the conf to contain the location for localized files
|
|
@param conf The conf to modify to contain the localized caches
|
|
@param str a comma separated list of local files]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addCacheArchive"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Add an archive to be localized to the conf
|
|
@param uri The uri of the cache to be localized
|
|
@param conf Configuration to add the cache to]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addCacheFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Add a file to be localized to the conf
|
|
@param uri The uri of the cache to be localized
|
|
@param conf Configuration to add the cache to]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addFileToClassPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add a file path to the current set of classpath entries. It adds the file
|
|
to cache as well.
|
|
|
|
@param file Path of the file to be added
|
|
@param conf Configuration that contains the classpath setting]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileClassPaths" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the file entries in classpath as an array of Path
|
|
|
|
@param conf Configuration that contains the classpath setting]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addArchiveToClassPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="archive" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add an archive path to the current set of classpath entries. It adds the
|
|
archive to cache as well.
|
|
|
|
@param archive Path of the archive to be added
|
|
@param conf Configuration that contains the classpath setting]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getArchiveClassPaths" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the archive entries in classpath as an array of Path
|
|
|
|
@param conf Configuration that contains the classpath setting]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createSymlink"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[This method allows you to create symlinks in the current working directory
|
|
of the task to all the cache files/archives
|
|
@param conf the jobconf]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSymlink" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[This method checks to see if symlinks are to be created for the
|
|
localized cache files in the current working directory
|
|
@param conf the jobconf
|
|
@return true if symlinks are to be created, false otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checkURIs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uriFiles" type="java.net.URI[]"/>
|
|
<param name="uriArchives" type="java.net.URI[]"/>
|
|
<doc>
|
|
<![CDATA[This method checks if there is a conflict in the fragment names
|
|
of the uris. Also makes sure that each uri has a fragment. It
|
|
is only to be called if you want to create symlinks for
|
|
the various archives and files.
|
|
@param uriFiles The uri array of urifiles
|
|
@param uriArchives the uri array of uri archives]]>
|
|
</doc>
|
|
</method>
|
|
<method name="purgeCache"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Clear the entire contents of the cache and delete the backing files. This
|
|
should only be used when the server is reinitializing, because the users
|
|
are going to lose their files.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Distribute application-specific large, read-only files efficiently.
|
|
|
|
<p><code>DistributedCache</code> is a facility provided by the Map-Reduce
|
|
framework to cache files (text, archives, jars etc.) needed by applications.
|
|
</p>
|
|
|
|
<p>Applications specify the files, via urls (hdfs:// or http://) to be cached
|
|
via the {@link org.apache.hadoop.mapred.JobConf}.
|
|
The <code>DistributedCache</code> assumes that the
|
|
files specified via hdfs:// urls are already present on the
|
|
{@link FileSystem} at the path specified by the url.</p>
|
|
|
|
<p>The framework will copy the necessary files on to the slave node before
|
|
any tasks for the job are executed on that node. Its efficiency stems from
|
|
the fact that the files are only copied once per job and the ability to
|
|
cache archives which are un-archived on the slaves.</p>
|
|
|
|
<p><code>DistributedCache</code> can be used to distribute simple, read-only
|
|
data/text files and/or more complex types such as archives, jars etc.
|
|
Archives (zip, tar and tgz/tar.gz files) are un-archived at the slave nodes.
|
|
Jars may be optionally added to the classpath of the tasks, a rudimentary
|
|
software distribution mechanism. Files have execution permissions.
|
|
Optionally users can also direct it to symlink the distributed cache file(s)
|
|
into the working directory of the task.</p>
|
|
|
|
<p><code>DistributedCache</code> tracks modification timestamps of the cache
|
|
files. Clearly the cache files should not be modified by the application
|
|
or externally while the job is executing.</p>
|
|
|
|
<p>Here is an illustrative example on how to use the
|
|
<code>DistributedCache</code>:</p>
|
|
<p><blockquote><pre>
|
|
// Setting up the cache for the application
|
|
|
|
1. Copy the requisite files to the <code>FileSystem</code>:
|
|
|
|
$ bin/hadoop fs -copyFromLocal lookup.dat /myapp/lookup.dat
|
|
$ bin/hadoop fs -copyFromLocal map.zip /myapp/map.zip
|
|
$ bin/hadoop fs -copyFromLocal mylib.jar /myapp/mylib.jar
|
|
$ bin/hadoop fs -copyFromLocal mytar.tar /myapp/mytar.tar
|
|
$ bin/hadoop fs -copyFromLocal mytgz.tgz /myapp/mytgz.tgz
|
|
$ bin/hadoop fs -copyFromLocal mytargz.tar.gz /myapp/mytargz.tar.gz
|
|
|
|
2. Setup the application's <code>JobConf</code>:
|
|
|
|
JobConf job = new JobConf();
|
|
DistributedCache.addCacheFile(new URI("/myapp/lookup.dat#lookup.dat"),
|
|
job);
|
|
DistributedCache.addCacheArchive(new URI("/myapp/map.zip", job);
|
|
DistributedCache.addFileToClassPath(new Path("/myapp/mylib.jar"), job);
|
|
DistributedCache.addCacheArchive(new URI("/myapp/mytar.tar", job);
|
|
DistributedCache.addCacheArchive(new URI("/myapp/mytgz.tgz", job);
|
|
DistributedCache.addCacheArchive(new URI("/myapp/mytargz.tar.gz", job);
|
|
|
|
3. Use the cached files in the {@link org.apache.hadoop.mapred.Mapper}
|
|
or {@link org.apache.hadoop.mapred.Reducer}:
|
|
|
|
public static class MapClass extends MapReduceBase
|
|
implements Mapper<K, V, K, V> {
|
|
|
|
private Path[] localArchives;
|
|
private Path[] localFiles;
|
|
|
|
public void configure(JobConf job) {
|
|
// Get the cached archives/files
|
|
localArchives = DistributedCache.getLocalCacheArchives(job);
|
|
localFiles = DistributedCache.getLocalCacheFiles(job);
|
|
}
|
|
|
|
public void map(K key, V value,
|
|
OutputCollector<K, V> output, Reporter reporter)
|
|
throws IOException {
|
|
// Use data from the cached archives/files here
|
|
// ...
|
|
// ...
|
|
output.collect(k, v);
|
|
}
|
|
}
|
|
|
|
</pre></blockquote></p>
|
|
|
|
@see org.apache.hadoop.mapred.JobConf
|
|
@see org.apache.hadoop.mapred.JobClient]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.filecache.DistributedCache -->
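<!-- Illustrative (non-normative) sketch: the symlink-related calls documented above
     (checkURIs and createSymlink) can be combined as follows. The URIs are made-up
     examples and URISyntaxException handling is omitted.

     JobConf job = new JobConf();
     URI[] files = { new URI("/myapp/lookup.dat#lookup.dat") };
     URI[] archives = { new URI("/myapp/map.zip#map") };
     // Verify that every URI carries a fragment and that no two fragments collide.
     if (DistributedCache.checkURIs(files, archives)) {
       DistributedCache.addCacheFile(files[0], job);
       DistributedCache.addCacheArchive(archives[0], job);
       // Ask the framework to symlink the localized files into the task's working directory.
       DistributedCache.createSymlink(job);
     }
-->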
|
|
</package>
|
|
<package name="org.apache.hadoop.fs">
|
|
<!-- start class org.apache.hadoop.fs.BlockLocation -->
|
|
<class name="BlockLocation" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="BlockLocation"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default Constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="BlockLocation" type="java.lang.String[], java.lang.String[], long, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor with host, name, offset and length]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getHosts" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the list of hosts (hostname) hosting this block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNames" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the list of names (hostname:port) hosting this block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOffset" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the start offset of file associated with this block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the length of the block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOffset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="offset" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the start offset of file associated with this block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setLength"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="length" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the length of block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setHosts"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="hosts" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set the hosts hosting this block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setNames"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="names" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set the names (host:port) hosting this block]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Implement write of Writable]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Implement readFields of Writable]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.BlockLocation -->
|
|
<!-- start class org.apache.hadoop.fs.BufferedFSInputStream -->
|
|
<class name="BufferedFSInputStream" extends="java.io.BufferedInputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.fs.Seekable"/>
|
|
<implements name="org.apache.hadoop.fs.PositionedReadable"/>
|
|
<constructor name="BufferedFSInputStream" type="org.apache.hadoop.fs.FSInputStream, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a <code>BufferedFSInputStream</code>
|
|
with the specified buffer size,
|
|
and saves its argument, the input stream
|
|
<code>in</code>, for later use. An internal
|
|
buffer array of length <code>size</code>
|
|
is created and stored in <code>buf</code>.
|
|
|
|
@param in the underlying input stream.
|
|
@param size the buffer size.
|
|
@exception IllegalArgumentException if size <= 0.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="skip" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="seekToNewSource" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="targetPos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A class that optimizes reading from FSInputStream by buffering]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.BufferedFSInputStream -->
|
|
<!-- start class org.apache.hadoop.fs.ChecksumException -->
|
|
<class name="ChecksumException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ChecksumException" type="java.lang.String, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Thrown for checksum errors.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.ChecksumException -->
|
|
<!-- start class org.apache.hadoop.fs.ChecksumFileSystem -->
|
|
<class name="ChecksumFileSystem" extends="org.apache.hadoop.fs.FilterFileSystem"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ChecksumFileSystem" type="org.apache.hadoop.fs.FileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getApproxChkSumLength" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="size" type="long"/>
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getRawFileSystem" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the raw file system.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getChecksumFile" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Return the name of the checksum file associated with a file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isChecksumFile" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Return true iff file is a checksum file name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getChecksumFileLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="fileSize" type="long"/>
|
|
<doc>
|
|
<![CDATA[Return the length of the checksum file given the size of the
|
|
actual file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesPerSum" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the bytes Per Checksum]]>
|
|
</doc>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataInputStream at the indicated Path.
|
|
@param f the file name to open
|
|
@param bufferSize the size of the buffer to be used.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getChecksumLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="size" type="long"/>
|
|
<param name="bytesPerSum" type="int"/>
|
|
<doc>
|
|
<![CDATA[Calculates the length of the checksum file in bytes.
|
|
@param size the length of the data file in bytes
|
|
@param bytesPerSum the number of bytes in a checksum block
|
|
@return the number of bytes in the checksum file]]>
|
|
</doc>
|
|
</method>
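<!-- Hypothetical usage sketch for the static helper above: estimating the size of the
     ".crc" side file for a 1 MB data file, assuming 512 bytes per checksum chunk (the
     value that getBytesPerSum() would report under a typical configuration).

     long dataLen = 1024L * 1024L;   // 1 MB data file
     int bytesPerSum = 512;          // assumed io.bytes.per.checksum value
     long crcLen = ChecksumFileSystem.getChecksumLength(dataLen, bytesPerSum);
     System.out.println("checksum file is about " + crcLen + " bytes");
-->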
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setReplication" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="replication" type="short"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set replication for an existing file.
|
|
Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
|
|
@param src file name
|
|
@param replication new replication
|
|
@throws IOException
|
|
@return true if successful;
|
|
false if file does not exist or is a directory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Rename files/dirs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Implement the delete(Path, boolean) in checksum
|
|
file system.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[List the statuses of the files/directories in the given path if the path is
|
|
a directory.
|
|
|
|
@param f
|
|
given path
|
|
@return the statuses of the files/directories in the given path
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is under FS, and the dst is on the local disk.
|
|
Copy it from FS control to the local dst name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="copyCrc" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is under FS, and the dst is on the local disk.
|
|
Copy it from FS control to the local dst name.
|
|
If src and dst are directories, the copyCrc parameter
|
|
determines whether to copy CRC files.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startLocalOutput" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="completeLocalOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reportChecksumFailure" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="in" type="org.apache.hadoop.fs.FSDataInputStream"/>
|
|
<param name="inPos" type="long"/>
|
|
<param name="sums" type="org.apache.hadoop.fs.FSDataInputStream"/>
|
|
<param name="sumsPos" type="long"/>
|
|
<doc>
|
|
<![CDATA[Report a checksum error to the file system.
|
|
@param f the file name containing the error
|
|
@param in the stream open on the file
|
|
@param inPos the position of the beginning of the bad data in the file
|
|
@param sums the stream open on the checksum file
|
|
@param sumsPos the position of the beginning of the bad data in the checksum file
|
|
@return true if a retry is necessary]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Abstract checksummed FileSystem.
|
|
It provides a basic implementation of a checksummed FileSystem,
|
|
which creates a checksum file for each raw file.
|
|
It generates & verifies checksums at the client side.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.ChecksumFileSystem -->
|
|
<!-- start class org.apache.hadoop.fs.ContentSummary -->
|
|
<class name="ContentSummary" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="ContentSummary"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ContentSummary" type="long, long, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ContentSummary" type="long, long, long, long, long, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDirectoryCount" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the directory count]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileCount" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the file count]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getQuota" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the directory quota]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSpaceConsumed" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the (disk) space consumed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSpaceQuota" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns (disk) space quota]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHeader" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="qOption" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Return the header of the output.
|
|
if qOption is false, output directory count, file count, and content size;
|
|
if qOption is true, output quota and remaining quota as well.
|
|
|
|
@param qOption a flag indicating if quota needs to be printed or not
|
|
@return the header of the output]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="qOption" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Return the string representation of the object in the output format.
|
|
if qOption is false, output directory count, file count, and content size;
|
|
if qOption is true, output quota and remaining quota as well.
|
|
|
|
@param qOption a flag indicating if quota needs to be printed or not
|
|
@return the string representation of the object]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Store the summary of a content (a directory or a file).]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.ContentSummary -->
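<!-- Hypothetical usage sketch of the header/summary formatting described above. The
     constructor argument order (length, file count, directory count) and the counts
     themselves are assumed example values.

     ContentSummary summary = new ContentSummary(1024L, 10L, 2L);
     // Without the quota option: directory count, file count and content size.
     System.out.print(ContentSummary.getHeader(false));
     System.out.println(summary.toString(false));
     // With the quota option: quota and remaining quota are printed as well.
     System.out.print(ContentSummary.getHeader(true));
     System.out.println(summary.toString(true));
-->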
|
|
<!-- start class org.apache.hadoop.fs.DF -->
|
|
<class name="DF" extends="org.apache.hadoop.util.Shell"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DF" type="java.io.File, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<constructor name="DF" type="java.io.File, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="getDirPath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getFilesystem" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getCapacity" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getUsed" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getAvailable" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getPercentUsed" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getMount" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getExecString" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="parseExecResult"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="lines" type="java.io.BufferedReader"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<field name="DF_INTERVAL_DEFAULT" type="long"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Filesystem disk space usage statistics. Uses the unix 'df' program.
|
|
Tested on Linux, FreeBSD, Cygwin.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.DF -->
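<!-- Hypothetical usage sketch for DF: sampling disk statistics of a local directory
     through the platform 'df' command. The directory is an example and the enclosing
     method is assumed to declare throws IOException.

     Configuration conf = new Configuration();
     DF df = new DF(new java.io.File("/tmp"), conf);  // or new DF(dir, DF.DF_INTERVAL_DEFAULT)
     System.out.println("mount     : " + df.getMount());
     System.out.println("capacity  : " + df.getCapacity() + " bytes");
     System.out.println("used      : " + df.getUsed() + " bytes (" + df.getPercentUsed() + "%)");
     System.out.println("available : " + df.getAvailable() + " bytes");
-->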
|
|
<!-- start class org.apache.hadoop.fs.DU -->
|
|
<class name="DU" extends="org.apache.hadoop.util.Shell"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DU" type="java.io.File, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Keeps track of disk usage.
|
|
@param path the path to check disk usage in
|
|
@param interval refresh the disk usage at this interval
|
|
@throws IOException if we fail to refresh the disk usage]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="DU" type="java.io.File, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Keeps track of disk usage.
|
|
@param path the path to check disk usage in
|
|
@param conf configuration object
|
|
@throws IOException if we fail to refresh the disk usage]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="decDfsUsed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="long"/>
|
|
<doc>
|
|
<![CDATA[Decrease how much disk space we use.
|
|
@param value decrease by this value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incDfsUsed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increase how much disk space we use.
|
|
@param value increase by this value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUsed" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@return disk space used
|
|
@throws IOException if the shell command fails]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDirPath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the path of which we're keeping track of disk usage]]>
|
|
</doc>
|
|
</method>
|
|
<method name="start"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Start the disk usage checking thread.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="shutdown"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Shut down the refreshing thread.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getExecString" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="parseExecResult"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="lines" type="java.io.BufferedReader"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Filesystem disk space usage statistics. Uses the unix 'du' program.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.DU -->
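<!-- Hypothetical usage sketch for DU: tracking the space consumed under a directory
     with the background refresh thread described above. The path and interval are
     examples; the enclosing method is assumed to declare throws IOException.

     DU du = new DU(new java.io.File("/tmp/data"), 600000L);  // refresh every 10 minutes
     du.start();                  // start the refresh thread
     long used = du.getUsed();    // bytes consumed under /tmp/data
     du.shutdown();               // stop the refresh thread
-->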
|
|
<!-- start class org.apache.hadoop.fs.FileChecksum -->
|
|
<class name="FileChecksum" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="FileChecksum"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getAlgorithmName" return="java.lang.String"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The checksum algorithm name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The length of the checksum in bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytes" return="byte[]"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The value of the checksum in bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Return true if both the algorithms and the values are the same.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An abstract class representing file checksums for files.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FileChecksum -->
|
|
<!-- start class org.apache.hadoop.fs.FileStatus -->
|
|
<class name="FileStatus" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="java.lang.Comparable"/>
|
|
<constructor name="FileStatus"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FileStatus" type="long, boolean, int, long, long, org.apache.hadoop.fs.Path"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FileStatus" type="long, boolean, int, long, long, long, org.apache.hadoop.fs.permission.FsPermission, java.lang.String, java.lang.String, org.apache.hadoop.fs.Path"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getLen" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="isDir" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Is this a directory?
|
|
@return true if this is a directory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBlockSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the block size of the file.
|
|
@return the number of bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReplication" return="short"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the replication factor of a file.
|
|
@return the replication factor of a file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getModificationTime" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the modification time of the file.
|
|
@return the modification time of file in milliseconds since January 1, 1970 UTC.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAccessTime" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the access time of the file.
|
|
@return the access time of file in milliseconds since January 1, 1970 UTC.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPermission" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get FsPermission associated with the file.
|
|
@return permission. If a filesystem does not have a notion of permissions
|
|
or if permissions could not be determined, then the default
|
|
permission equivalent of "rwxrwxrwx" is returned.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOwner" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the owner of the file.
|
|
@return owner of the file. The string could be empty if there is no
|
|
notion of owner of a file in a filesystem or if it could not
|
|
be determined (rare).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getGroup" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the group associated with the file.
|
|
@return group for the file. The string could be empty if there is no
|
|
notion of group of a file in a filesystem or if it could not
|
|
be determined (rare).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<doc>
|
|
<![CDATA[Sets permission.
|
|
@param permission if permission is null, default value is set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOwner"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="owner" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets owner.
|
|
@param owner if it is null, default value is set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setGroup"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="group" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets group.
|
|
@param group if it is null, default value is set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compare this object to another object
|
|
|
|
@param o the object to be compared.
|
|
@return a negative integer, zero, or a positive integer as this object
|
|
is less than, equal to, or greater than the specified object.
|
|
|
|
@throws ClassCastException if the specified object is not of
|
|
type FileStatus]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compare if this object is equal to another object
|
|
@param o the object to be compared.
|
|
@return true if the two file statuses have the same path name; false if not.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a hash code value for the object, which is defined as
|
|
the hash code of the path name.
|
|
|
|
@return a hash code value for the path name.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Class that represents the client-side information for a file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FileStatus -->
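<!-- Hypothetical usage sketch: reading the client-side file metadata carried by
     FileStatus, typically obtained from FileSystem#listStatus. The path is an example
     and the enclosing method is assumed to declare throws IOException.

     FileSystem fs = FileSystem.get(new Configuration());
     for (FileStatus stat : fs.listStatus(new Path("/user/example"))) {
       System.out.println(stat.getPath()
           + (stat.isDir() ? " <dir>" : " " + stat.getLen() + " bytes")
           + " owner=" + stat.getOwner()
           + " perm=" + stat.getPermission());
     }
-->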
|
|
<!-- start class org.apache.hadoop.fs.FileSystem -->
|
|
<class name="FileSystem" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.io.Closeable"/>
|
|
<constructor name="FileSystem"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="parseArgs" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Consider using {@link GenericOptionsParser} instead.">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<param name="i" type="int"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Parse the cmd-line args, starting at i. Remove consumed args
|
|
from array. We expect param in the form:
|
|
'-local | -dfs <namenode:port>'
|
|
@deprecated Consider using {@link GenericOptionsParser} instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns the configured filesystem implementation.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the default filesystem URI from a configuration.
|
|
@param conf the configuration to access
|
|
@return the uri of the default filesystem]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDefaultUri"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="uri" type="java.net.URI"/>
|
|
<doc>
|
|
<![CDATA[Set the default filesystem URI in a configuration.
|
|
@param conf the configuration to alter
|
|
@param uri the new default filesystem uri]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDefaultUri"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="uri" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the default filesystem URI in a configuration.
|
|
@param conf the configuration to alter
|
|
@param uri the new default filesystem uri]]>
|
|
</doc>
|
|
</method>
|
|
<method name="initialize"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called after a new FileSystem instance is constructed.
|
|
@param name a uri whose authority section names the host, port, etc.
|
|
for this FileSystem
|
|
@param conf the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a URI whose scheme and authority identify this FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="call #getUri() instead.">
|
|
<doc>
|
|
<![CDATA[@deprecated call #getUri() instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNamed" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="call #get(URI,Configuration) instead.">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated call #get(URI,Configuration) instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocal" return="org.apache.hadoop.fs.LocalFileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the local file system.
|
|
@param conf the configuration to configure the file system with
|
|
@return a LocalFileSystem]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns the FileSystem for this URI's scheme and authority. The scheme
|
|
of the URI determines a configuration property name,
|
|
<tt>fs.<i>scheme</i>.class</tt> whose value names the FileSystem class.
|
|
The entire URI is passed to the FileSystem instance's initialize method.]]>
|
|
</doc>
|
|
</method>
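<!-- Illustrative (non-normative) sketch of the lookup described above: the URI scheme
     selects the configured FileSystem implementation class and the whole URI is then
     handed to initialize(). The namenode host and port are hypothetical.

     Configuration conf = new Configuration();
     FileSystem defaultFs = FileSystem.get(conf);                                  // default filesystem
     FileSystem hdfs = FileSystem.get(URI.create("hdfs://namenode:9000/"), conf);  // scheme-specific lookup
-->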
|
|
<method name="closeAll"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close all cached filesystems. Be sure those filesystems are not
|
|
used anymore.
|
|
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeQualified" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Make sure that a path specifies a FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a file with the provided permission.
|
|
The permission of the file is set to be the provided permission as in
|
|
setPermission, not permission&~umask
|
|
|
|
It is implemented using two RPCs. It is understood that it is inefficient,
|
|
but the implementation is thread-safe. The other option is to change the
|
|
value of umask in configuration to be 0, but it is not thread-safe.
|
|
|
|
@param fs file system handle
|
|
@param file the name of the file to be created
|
|
@param permission the permission of the file
|
|
@return an output stream
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
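<!-- Hypothetical usage sketch for the helper above: creating a file whose permission is
     exactly the requested one rather than permission&~umask, at the cost of the second
     RPC mentioned in the javadoc. Path and mode are example values; the enclosing
     method is assumed to declare throws IOException.

     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.get(conf);
     Path file = new Path("/user/example/report.txt");
     FsPermission perm = new FsPermission((short) 0644);
     FSDataOutputStream out = FileSystem.create(fs, file, perm);
     out.writeUTF("hello");
     out.close();
-->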
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a directory with the provided permission.
|
|
The permission of the directory is set to be the provided permission as in
|
|
setPermission, not permission&~umask
|
|
|
|
@see #create(FileSystem, Path, FsPermission)
|
|
|
|
@param fs file system handle
|
|
@param dir the name of the directory to be created
|
|
@param permission the permission of the directory
|
|
@return true if the directory creation succeeds; false otherwise
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checkPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Check that a Path belongs to this FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileBlockLocations" return="org.apache.hadoop.fs.BlockLocation[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.FileStatus"/>
|
|
<param name="start" type="long"/>
|
|
<param name="len" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return an array containing hostnames, offset and size of
|
|
portions of the given file. For a nonexistent
|
|
file or regions, null will be returned.
|
|
|
|
This call is most helpful with DFS, where it returns
|
|
hostnames of machines that contain the given file.
|
|
|
|
The default FileSystem implementation will simply return an element containing 'localhost'.]]>
|
|
</doc>
|
|
</method>
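<!-- Illustrative (non-normative) sketch of the locality query above: each BlockLocation
     pairs a file region (offset/length) with the hosts storing it. The path is an
     example; the enclosing method is assumed to declare throws IOException.

     FileSystem fs = FileSystem.get(new Configuration());
     Path p = new Path("/user/example/big.dat");
     FileStatus stat = fs.getFileStatus(p);
     BlockLocation[] blocks = fs.getFileBlockLocations(stat, 0, stat.getLen());
     for (BlockLocation b : blocks) {
       System.out.println("offset=" + b.getOffset() + " len=" + b.getLength()
           + " hosts=" + java.util.Arrays.toString(b.getHosts()));
     }
-->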
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataInputStream at the indicated Path.
|
|
@param f the file name to open
|
|
@param bufferSize the size of the buffer to be used.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataInputStream at the indicated Path.
|
|
@param f the file to open]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path.
|
|
Files are overwritten by default.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create an FSDataOutputStream at the indicated Path with write-progress
|
|
reporting.
|
|
Files are overwritten by default.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="replication" type="short"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path.
|
|
Files are overwritten by default.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path with write-progress
|
|
reporting.
|
|
Files are overwritten by default.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path.
|
|
@param f the file name to open
|
|
@param overwrite if a file with this name already exists, then if true,
|
|
the file will be overwritten, and if false an error will be thrown.
|
|
@param bufferSize the size of the buffer to be used.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path with write-progress
|
|
reporting.
|
|
@param f the file name to open
|
|
@param overwrite if a file with this name already exists, then if true,
|
|
the file will be overwritten, and if false an error will be thrown.
|
|
@param bufferSize the size of the buffer to be used.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path.
|
|
@param f the file name to open
|
|
@param overwrite if a file with this name already exists, then if true,
|
|
the file will be overwritten, and if false an error will be thrown.
|
|
@param bufferSize the size of the buffer to be used.
|
|
@param replication required block replication for the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path with write-progress
|
|
reporting.
|
|
@param f the file name to open
|
|
@param overwrite if a file with this name already exists, then if true,
|
|
the file will be overwritten, and if false an error will be thrown.
|
|
@param bufferSize the size of the buffer to be used.
|
|
@param replication required block replication for the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataOutputStream at the indicated Path with write-progress
|
|
reporting.
|
|
@param f the file name to open
|
|
@param permission
|
|
@param overwrite if a file with this name already exists, then if true,
|
|
the file will be overwritten, and if false an error will be thrown.
|
|
@param bufferSize the size of the buffer to be used.
|
|
@param replication required block replication for the file.
|
|
@param blockSize
|
|
@param progress
|
|
@throws IOException
|
|
@see #setPermission(Path, FsPermission)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createNewFile" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Creates the given Path as a brand-new zero-length file. If
|
|
creation fails, or if the file already exists, return false.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append to an existing file (optional operation).
|
|
Same as append(f, getConf().getInt("io.file.buffer.size", 4096), null)
|
|
@param f the existing file to be appended.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append to an existing file (optional operation).
|
|
Same as append(f, bufferSize, null).
|
|
@param f the existing file to be appended.
|
|
@param bufferSize the size of the buffer to be used.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append to an existing file (optional operation).
|
|
@param f the existing file to be appended.
|
|
@param bufferSize the size of the buffer to be used.
|
|
@param progress for reporting progress if it is not null.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
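<!-- Usage sketch (editorial note, not produced by JDiff): appending to an existing file with the
     convenience overload documented above. Append is an optional operation and may fail on file
     systems that do not support it; the path is an illustrative assumption.

       FSDataOutputStream out = fs.append(new Path("/logs/app.log"));
       out.writeBytes("one more line\n");
       out.close();
-->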
|
|
<method name="getReplication" return="short"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use getFileStatus() instead">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get replication.
|
|
|
|
@deprecated Use getFileStatus() instead
|
|
@param src file name
|
|
@return file replication
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setReplication" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="replication" type="short"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set replication for an existing file.
|
|
|
|
@param src file name
|
|
@param replication new replication
|
|
@throws IOException
|
|
@return true if successful;
|
|
false if file does not exist or is a directory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Renames Path src to Path dst. Can take place on local fs
|
|
or remote DFS.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use delete(Path, boolean) instead">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Use delete(Path, boolean) instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Delete a file.
|
|
|
|
@param f the path to delete.
|
|
@param recursive if the path is a directory and recursive is set to
|
|
true, the directory is deleted; otherwise an exception is thrown. In
|
|
the case of a file, recursive can be set to either true or false.
|
|
@return true if delete is successful else false.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
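<!-- Usage sketch (editorial note, not produced by JDiff): the recursive flag described above.
     Deleting a non-empty directory requires recursive = true; the path is an illustrative assumption.

       boolean deleted = fs.delete(new Path("/tmp/scratch"), true);
-->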
|
|
<method name="deleteOnExit" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark a path to be deleted when FileSystem is closed.
|
|
When the JVM shuts down,
|
|
all FileSystem objects will be closed automatically.
|
|
Then,
|
|
the marked path will be deleted as a result of closing the FileSystem.
|
|
|
|
The path has to exist in the file system.
|
|
|
|
@param f the path to delete.
|
|
@return true if deleteOnExit is successful, otherwise false.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="processDeleteOnExit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Delete all files that were marked as delete-on-exit. This recursively
|
|
deletes all files in the specified paths.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="exists" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check whether the given path exists.
|
|
@param f source file]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isDirectory" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use getFileStatus() instead">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Use getFileStatus() instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isFile" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[True iff the named path is a regular file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use getFileStatus() instead">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Use getFileStatus() instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getContentSummary" return="org.apache.hadoop.fs.ContentSummary"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the {@link ContentSummary} of a given {@link Path}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[List the statuses of the files/directories in the given path if the path is
|
|
a directory.
|
|
|
|
@param f
|
|
given path
|
|
@return the statuses of the files/directories in the given path
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="filter" type="org.apache.hadoop.fs.PathFilter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Filter files/directories in the given path using the user-supplied path
|
|
filter.
|
|
|
|
@param f
|
|
a path name
|
|
@param filter
|
|
the user-supplied path filter
|
|
@return an array of FileStatus objects for the files under the given path
|
|
after applying the filter
|
|
@throws IOException
|
|
if any problem is encountered while fetching the status]]>
|
|
</doc>
|
|
</method>
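<!-- Usage sketch (editorial note, not produced by JDiff): a user-supplied PathFilter, as accepted
     by the listStatus overload above, that keeps only ".txt" entries. The directory name is an
     illustrative assumption.

       FileStatus[] txtFiles = fs.listStatus(new Path("/data"), new PathFilter() {
         public boolean accept(Path p) {
           return p.getName().endsWith(".txt");
         }
       });
-->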
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="files" type="org.apache.hadoop.fs.Path[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Filter files/directories in the given list of paths using default
|
|
path filter.
|
|
|
|
@param files
|
|
a list of paths
|
|
@return a list of statuses for the files under the given paths after
|
|
applying the default path filter
|
|
@exception IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="files" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="filter" type="org.apache.hadoop.fs.PathFilter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Filter files/directories in the given list of paths using user-supplied
|
|
path filter.
|
|
|
|
@param files
|
|
a list of paths
|
|
@param filter
|
|
the user-supplied path filter
|
|
@return a list of statuses for the files under the given paths after
|
|
applying the filter
|
|
@exception IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="globStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathPattern" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>Return all the files that match pathPattern and are not checksum
|
|
files. Results are sorted by their names.
|
|
|
|
<p>
|
|
A filename pattern is composed of <i>regular</i> characters and
|
|
<i>special pattern matching</i> characters, which are:
|
|
|
|
<dl>
|
|
<dd>
|
|
<dl>
|
|
<p>
|
|
<dt> <tt> ? </tt>
|
|
<dd> Matches any single character.
|
|
|
|
<p>
|
|
<dt> <tt> * </tt>
|
|
<dd> Matches zero or more characters.
|
|
|
|
<p>
|
|
<dt> <tt> [<i>abc</i>] </tt>
|
|
<dd> Matches a single character from character set
|
|
<tt>{<i>a,b,c</i>}</tt>.
|
|
|
|
<p>
|
|
<dt> <tt> [<i>a</i>-<i>b</i>] </tt>
|
|
<dd> Matches a single character from the character range
|
|
<tt>{<i>a...b</i>}</tt>. Note that character <tt><i>a</i></tt> must be
|
|
lexicographically less than or equal to character <tt><i>b</i></tt>.
|
|
|
|
<p>
|
|
<dt> <tt> [^<i>a</i>] </tt>
|
|
<dd> Matches a single character that is not from character set or range
|
|
<tt>{<i>a</i>}</tt>. Note that the <tt>^</tt> character must occur
|
|
immediately to the right of the opening bracket.
|
|
|
|
<p>
|
|
<dt> <tt> \<i>c</i> </tt>
|
|
<dd> Removes (escapes) any special meaning of character <i>c</i>.
|
|
|
|
<p>
|
|
<dt> <tt> {ab,cd} </tt>
|
|
<dd> Matches a string from the string set <tt>{<i>ab, cd</i>} </tt>
|
|
|
|
<p>
|
|
<dt> <tt> {ab,c{de,fh}} </tt>
|
|
<dd> Matches a string from the string set <tt>{<i>ab, cde, cfh</i>}</tt>
|
|
|
|
</dl>
|
|
</dd>
|
|
</dl>
|
|
|
|
@param pathPattern a regular expression specifying a path pattern
|
|
|
|
@return an array of paths that match the path pattern
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
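<!-- Usage sketch (editorial note, not produced by JDiff): glob matching with the special characters
     documented above, combined with FileUtil.stat2Paths. The pattern is an illustrative assumption.

       FileStatus[] parts = fs.globStatus(new Path("/output/part-[0-9]*"));
       Path[] paths = FileUtil.stat2Paths(parts);
-->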
|
|
<method name="globStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathPattern" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="filter" type="org.apache.hadoop.fs.PathFilter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return an array of FileStatus objects whose path names match pathPattern
|
|
and is accepted by the user-supplied path filter. Results are sorted by
|
|
their path names.
|
|
Return null if pathPattern has no glob and the path does not exist.
|
|
Return an empty array if pathPattern has a glob and no path matches it.
|
|
|
|
@param pathPattern
|
|
a regular expression specifying the path pattern
|
|
@param filter
|
|
a user-supplied path filter
|
|
@return an array of FileStatus objects
|
|
@throws IOException if any I/O error occurs when fetching file status]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHomeDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the current user's home directory in this filesystem.
|
|
The default implementation returns "/user/$USER/".]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="new_dir" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the current working directory for the given file system. All relative
|
|
paths will be resolved relative to it.
|
|
|
|
@param new_dir]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the current working directory for the given file system
|
|
@return the directory pathname]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Call {@link #mkdirs(Path, FsPermission)} with default permission.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Make the given file and all non-existent parents into
|
|
directories. Has the semantics of Unix 'mkdir -p'.
|
|
Existence of the directory hierarchy is not an error.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is on the local disk. Add it to FS at
|
|
the given dst name and the source is kept intact afterwards]]>
|
|
</doc>
|
|
</method>
|
|
<method name="moveFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="srcs" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src files are on the local disk. Add them to the FS at
|
|
the given dst name, removing the sources afterwards.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="moveFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is on the local disk. Add it to FS at
|
|
the given dst name, removing the source afterwards.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is on the local disk. Add it to FS at
|
|
the given dst name.
|
|
delSrc indicates if the source should be removed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="srcs" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src files are on the local disk. Add them to the FS at
|
|
the given dst name.
|
|
delSrc indicates whether the sources should be removed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is on the local disk. Add it to FS at
|
|
the given dst name.
|
|
delSrc indicates if the source should be removed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is under FS, and the dst is on the local disk.
|
|
Copy it from FS control to the local dst name.]]>
|
|
</doc>
|
|
</method>
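<!-- Usage sketch (editorial note, not produced by JDiff): moving data between the local disk and
     the FileSystem with the copy helpers above. Both paths are illustrative assumptions.

       fs.copyFromLocalFile(new Path("/local/input.csv"), new Path("/data/input.csv"));
       fs.copyToLocalFile(new Path("/data/results.csv"), new Path("/local/results.csv"));
-->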
|
|
<method name="moveToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is under FS, and the dst is on the local disk.
|
|
Copy it from FS control to the local dst name.
|
|
Remove the source afterwards]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is under FS, and the dst is on the local disk.
|
|
Copy it from FS control to the local dst name.
|
|
delSrc indicates if the src will be removed or not.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startLocalOutput" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns a local File that the user can write output to. The caller
|
|
provides both the eventual FS target name and the local working
|
|
file. If the FS is local, we write directly into the target. If
|
|
the FS is remote, we write into the tmp local area.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="completeLocalOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called when we're all done writing to the target. A local FS will
|
|
do nothing, because we've written to exactly the right place. A remote
|
|
FS will copy the contents of tmpLocalFile to the correct target at
|
|
fsOutputFile.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[No more filesystem operations are needed. Will
|
|
release any held locks.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUsed" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the total size of all files in the filesystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBlockSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use getFileStatus() instead">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Use getFileStatus() instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultBlockSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the number of bytes that large input files should optimally
|
|
be split into to minimize I/O time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultReplication" return="short"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the default replication.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return a file status object that represents the path.
|
|
@param f The path we want information from
|
|
@return a FileStatus object
|
|
@throws FileNotFoundException when the path does not exist;
|
|
IOException see specific implementation]]>
|
|
</doc>
|
|
</method>
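<!-- Usage sketch (editorial note, not produced by JDiff): getFileStatus as the replacement for the
     deprecated getLength/getReplication/getBlockSize/isDirectory calls. The path is an illustrative
     assumption.

       FileStatus st = fs.getFileStatus(new Path("/data/input.csv"));
       long length = st.getLen();
       short replication = st.getReplication();
       boolean isDir = st.isDir();
-->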
|
|
<method name="getFileChecksum" return="org.apache.hadoop.fs.FileChecksum"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the checksum of a file.
|
|
|
|
@param f The file path
|
|
@return The file checksum. The default return value is null,
|
|
which indicates that no checksum algorithm is implemented
|
|
in the corresponding FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set permission of a path.
|
|
@param p
|
|
@param permission]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOwner"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="username" type="java.lang.String"/>
|
|
<param name="groupname" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set owner of a path (i.e. a file or a directory).
|
|
The parameters username and groupname cannot both be null.
|
|
@param p The path
|
|
@param username If it is null, the original username remains unchanged.
|
|
@param groupname If it is null, the original groupname remains unchanged.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTimes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="mtime" type="long"/>
|
|
<param name="atime" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set the modification and access times of a file.
|
|
@param p The path
|
|
@param mtime Set the modification time of this file.
|
|
The number of milliseconds since Jan 1, 1970.
|
|
A value of -1 means that this call should not set modification time.
|
|
@param atime Set the access time of this file.
|
|
The number of milliseconds since Jan 1, 1970.
|
|
A value of -1 means that this call should not set access time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStatistics" return="org.apache.hadoop.fs.FileSystem.Statistics"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cls" type="java.lang.Class<? extends org.apache.hadoop.fs.FileSystem>"/>
|
|
<doc>
|
|
<![CDATA[Get the statistics for a particular file system
|
|
@param cls the class to lookup
|
|
@return a statistics object]]>
|
|
</doc>
|
|
</method>
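<!-- Usage sketch (editorial note, not produced by JDiff): reading the per-implementation statistics
     object returned by getStatistics. Using DistributedFileSystem.class here is an illustrative choice.

       FileSystem.Statistics stats = FileSystem.getStatistics(DistributedFileSystem.class);
       System.out.println(stats.getBytesRead() + " bytes read, "
           + stats.getBytesWritten() + " bytes written");
-->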
|
|
<method name="printStatistics"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="statistics" type="org.apache.hadoop.fs.FileSystem.Statistics"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The statistics for this file system.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An abstract base class for a fairly generic filesystem. It
|
|
may be implemented as a distributed filesystem, or as a "local"
|
|
one that reflects the locally-connected disk. The local version
|
|
exists for small Hadoop instances and for testing.
|
|
|
|
<p>
|
|
|
|
All user code that may potentially use the Hadoop Distributed
|
|
File System should be written to use a FileSystem object. The
|
|
Hadoop DFS is a multi-machine system that appears as a single
|
|
disk. It's useful because of its fault tolerance and potentially
|
|
very large capacity.
|
|
|
|
<p>
|
|
The local implementation is {@link LocalFileSystem} and distributed
|
|
implementation is DistributedFileSystem.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FileSystem -->
|
|
<!-- start class org.apache.hadoop.fs.FileSystem.Statistics -->
|
|
<class name="FileSystem.Statistics" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FileSystem.Statistics"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="incrementBytesRead"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newBytes" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increment the bytes read in the statistics
|
|
@param newBytes the additional bytes read]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrementBytesWritten"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newBytes" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increment the bytes written in the statistics
|
|
@param newBytes the additional bytes written]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesRead" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the total number of bytes read
|
|
@return the number of bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesWritten" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the total number of bytes written
|
|
@return the number of bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FileSystem.Statistics -->
|
|
<!-- start class org.apache.hadoop.fs.FileUtil -->
|
|
<class name="FileUtil" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FileUtil"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="stat2Paths" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stats" type="org.apache.hadoop.fs.FileStatus[]"/>
|
|
<doc>
|
|
<![CDATA[Convert an array of FileStatus to an array of Path
|
|
|
|
@param stats
|
|
an array of FileStatus objects
|
|
@return an array of paths corresponding to the input]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stat2Paths" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stats" type="org.apache.hadoop.fs.FileStatus[]"/>
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Convert an array of FileStatus to an array of Path.
|
|
If stats is null, return the path.
|
|
@param stats
|
|
an array of FileStatus objects
|
|
@param path
|
|
default path to return if stats is null
|
|
@return an array of paths corresponding to the input]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fullyDelete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Delete a directory and all its contents. If
|
|
we return false, the directory may be partially-deleted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fullyDelete"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use {@link FileSystem#delete(Path, boolean)}">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Recursively delete a directory.
|
|
|
|
@param fs {@link FileSystem} on which the path is present
|
|
@param dir directory to recursively delete
|
|
@throws IOException
|
|
@deprecated Use {@link FileSystem#delete(Path, boolean)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copy" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="srcFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dstFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteSource" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copy files between FileSystems.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copy" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="srcFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="srcs" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="dstFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteSource" type="boolean"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="copy" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="srcFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dstFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteSource" type="boolean"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copy files between FileSystems.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyMerge" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="srcFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="srcDir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dstFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dstFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteSource" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="addString" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copy all files in a directory to one output file (merge).]]>
|
|
</doc>
|
|
</method>
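<!-- Usage sketch (editorial note, not produced by JDiff): merging all files of a directory into a
     single output file with copyMerge. The file systems, paths, and separator string are
     illustrative assumptions.

       FileUtil.copyMerge(srcFs, new Path("/output"), dstFs, new Path("/merged/all.txt"),
           false, conf, "\n");
-->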
|
|
<method name="copy" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="java.io.File"/>
|
|
<param name="dstFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteSource" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copy local files to a FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copy" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="srcFS" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="java.io.File"/>
|
|
<param name="deleteSource" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copy FileSystem files to local files.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeShellPath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="filename" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Convert an OS-native filename to a path that works for the shell.
|
|
@param filename The filename to convert
|
|
@return The unix pathname
|
|
@throws IOException on windows, there can be problems with the subprocess]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeShellPath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Convert an OS-native filename to a path that works for the shell.
|
|
@param file The filename to convert
|
|
@return The unix pathname
|
|
@throws IOException on windows, there can be problems with the subprocess]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeShellPath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="java.io.File"/>
|
|
<param name="makeCanonicalPath" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Convert an OS-native filename to a path that works for the shell.
|
|
@param file The filename to convert
|
|
@param makeCanonicalPath
|
|
Whether to make canonical path for the file passed
|
|
@return The unix pathname
|
|
@throws IOException on windows, there can be problems with the subprocess]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDU" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="java.io.File"/>
|
|
<doc>
|
|
<![CDATA[Takes an input dir and returns the du on that local directory. Very basic
|
|
implementation.
|
|
|
|
@param dir
|
|
The local input directory whose disk usage should be returned
|
|
@return The total disk space of the input local directory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unZip"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inFile" type="java.io.File"/>
|
|
<param name="unzipDir" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Given a File input it will unzip the file in the unzip directory
|
|
passed as the second parameter
|
|
@param inFile The zip file as input
|
|
@param unzipDir The unzip directory where to unzip the zip file.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unTar"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inFile" type="java.io.File"/>
|
|
<param name="untarDir" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Given a Tar File as input it will untar the file in the untar directory
|
|
passed as the second parameter
|
|
|
|
This utility will untar ".tar" files as well as ".tar.gz" and "tgz" files.
|
|
|
|
@param inFile The tar file as input.
|
|
@param untarDir The untar directory where to untar the tar file.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="symLink" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="target" type="java.lang.String"/>
|
|
<param name="linkname" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a soft link between a src and destination
|
|
only on a local disk. HDFS does not support this
|
|
@param target the target for symlink
|
|
@param linkname the symlink
|
|
@return value returned by the command]]>
|
|
</doc>
|
|
</method>
|
|
<method name="chmod" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="filename" type="java.lang.String"/>
|
|
<param name="perm" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<doc>
|
|
<![CDATA[Change the permissions on a filename.
|
|
@param filename the name of the file to change
|
|
@param perm the permission string
|
|
@return the exit code from the command
|
|
@throws IOException
|
|
@throws InterruptedException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createLocalTempFile" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="basefile" type="java.io.File"/>
|
|
<param name="prefix" type="java.lang.String"/>
|
|
<param name="isDeleteOnExit" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a tmp file for a base file.
|
|
@param basefile the base file of the tmp
|
|
@param prefix file name prefix of tmp
|
|
@param isDeleteOnExit if true, the tmp will be deleted when the VM exits
|
|
@return a newly created tmp file
|
|
@exception IOException If a tmp file cannot be created
|
|
@see java.io.File#createTempFile(String, String, File)
|
|
@see java.io.File#deleteOnExit()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="replaceFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="java.io.File"/>
|
|
<param name="target" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Move the src file to the name specified by target.
|
|
@param src the source file
|
|
@param target the target file
|
|
@exception IOException If this operation fails]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A collection of file-processing utility methods.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FileUtil -->
|
|
<!-- start class org.apache.hadoop.fs.FileUtil.HardLink -->
|
|
<class name="FileUtil.HardLink" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FileUtil.HardLink"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="createHardLink"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="target" type="java.io.File"/>
|
|
<param name="linkName" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Creates a hardlink]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLinkCount" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fileName" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Retrieves the number of links to the specified file.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Class for creating hardlinks.
|
|
Supports Unix, Cygwin, and Windows XP.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FileUtil.HardLink -->
|
|
<!-- start class org.apache.hadoop.fs.FilterFileSystem -->
|
|
<class name="FilterFileSystem" extends="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FilterFileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FilterFileSystem" type="org.apache.hadoop.fs.FileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called after a new FileSystem instance is constructed.
|
|
@param name a uri whose authority section names the host, port, etc.
|
|
for this FileSystem
|
|
@param conf the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a URI whose scheme and authority identify this FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="call #getUri() instead.">
|
|
<doc>
|
|
<![CDATA[@deprecated call #getUri() instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeQualified" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Make sure that a path specifies a FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checkPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Check that a Path belongs to this FileSystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileBlockLocations" return="org.apache.hadoop.fs.BlockLocation[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.FileStatus"/>
|
|
<param name="start" type="long"/>
|
|
<param name="len" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Opens an FSDataInputStream at the indicated Path.
|
|
@param f the file name to open
|
|
@param bufferSize the size of the buffer to be used.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setReplication" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="replication" type="short"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set replication for an existing file.
|
|
|
|
@param src file name
|
|
@param replication new replication
|
|
@throws IOException
|
|
@return true if successful;
|
|
false if file does not exist or is a directory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Renames Path src to Path dst. Can take place on local fs
|
|
or remote DFS.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Delete a file]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Delete a file]]>
|
|
</doc>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[List files in a directory.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHomeDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newDir" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the current working directory for the given file system. All relative
|
|
paths will be resolved relative to it.
|
|
|
|
@param newDir]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the current working directory for the given file system
|
|
|
|
@return the directory pathname]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is on the local disk. Add it to FS at
|
|
the given dst name.
|
|
delSrc indicates if the source should be removed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The src file is under FS, and the dst is on the local disk.
|
|
Copy it from FS control to the local dst name.
|
|
delSrc indicates if the src will be removed or not.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startLocalOutput" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns a local File that the user can write output to. The caller
|
|
provides both the eventual FS target name and the local working
|
|
file. If the FS is local, we write directly into the target. If
|
|
the FS is remote, we write into the tmp local area.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="completeLocalOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called when we're all done writing to the target. A local FS will
|
|
do nothing, because we've written to exactly the right place. A remote
|
|
FS will copy the contents of tmpLocalFile to the correct target at
|
|
fsOutputFile.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultBlockSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the number of bytes that large input files should optimally
|
|
be split into to minimize I/O time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultReplication" return="short"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the default replication.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get file status.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileChecksum" return="org.apache.hadoop.fs.FileChecksum"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setOwner"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="username" type="java.lang.String"/>
|
|
<param name="groupname" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<field name="fs" type="org.apache.hadoop.fs.FileSystem"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A <code>FilterFileSystem</code> contains
|
|
some other file system, which it uses as
|
|
its basic file system, possibly transforming
|
|
the data along the way or providing additional
|
|
functionality. The class <code>FilterFileSystem</code>
|
|
itself simply overrides all methods of
|
|
<code>FileSystem</code> with versions that
|
|
pass all requests to the contained file
|
|
system. Subclasses of <code>FilterFileSystem</code>
|
|
may further override some of these methods
|
|
and may also provide additional methods
|
|
and fields.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FilterFileSystem -->
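<!-- Sketch of a FilterFileSystem subclass, following the class description above:
     everything is delegated to the wrapped FileSystem and only open() is decorated.
     The class name and the log line are illustrative assumptions.

     import java.io.IOException;
     import org.apache.hadoop.fs.FSDataInputStream;
     import org.apache.hadoop.fs.FileSystem;
     import org.apache.hadoop.fs.FilterFileSystem;
     import org.apache.hadoop.fs.Path;

     public class LoggingFileSystem extends FilterFileSystem {
       public LoggingFileSystem(FileSystem fs) {
         super(fs);                             // the contained file system handles all calls
       }

       public FSDataInputStream open(Path f, int bufferSize) throws IOException {
         System.err.println("opening " + f);    // illustrative side effect
         return super.open(f, bufferSize);      // pass the request straight through
       }
     }
-->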
|
|
<!-- start class org.apache.hadoop.fs.FSDataInputStream -->
|
|
<class name="FSDataInputStream" extends="java.io.DataInputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.fs.Seekable"/>
|
|
<implements name="org.apache.hadoop.fs.PositionedReadable"/>
|
|
<constructor name="FSDataInputStream" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="desired" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="seekToNewSource" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="targetPos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Utility that wraps a {@link FSInputStream} in a {@link DataInputStream}
|
|
and buffers input through a {@link BufferedInputStream}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FSDataInputStream -->
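<!-- Sketch of the positioned-read and seek calls listed above. The path and the read
     position are illustrative, and the stream is assumed to come from FileSystem.open().

     import java.io.IOException;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.FSDataInputStream;
     import org.apache.hadoop.fs.FileSystem;
     import org.apache.hadoop.fs.Path;

     public class PositionedReadSketch {
       public static void main(String[] args) throws IOException {
         FileSystem fs = FileSystem.get(new Configuration());
         FSDataInputStream in = fs.open(new Path("/tmp/example.dat"));  // illustrative path
         byte[] buf = new byte[128];
         // Positioned read: fetches bytes at the given offset without moving
         // the stream's sequential position.
         int n = in.read(1024L, buf, 0, buf.length);
         in.seek(0);                       // explicit reposition for sequential reads
         System.out.println("read " + n + " bytes");
         in.close();
       }
     }
-->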
|
|
<!-- start class org.apache.hadoop.fs.FSDataOutputStream -->
|
|
<class name="FSDataOutputStream" extends="java.io.DataOutputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.fs.Syncable"/>
|
|
<constructor name="FSDataOutputStream" type="java.io.OutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<constructor name="FSDataOutputStream" type="java.io.OutputStream, org.apache.hadoop.fs.FileSystem.Statistics"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<constructor name="FSDataOutputStream" type="java.io.OutputStream, org.apache.hadoop.fs.FileSystem.Statistics, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getWrappedStream" return="java.io.OutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="sync"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Utility that wraps a {@link OutputStream} in a {@link DataOutputStream},
|
|
buffers output through a {@link BufferedOutputStream} and creates a checksum
|
|
file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FSDataOutputStream -->
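<!-- Sketch of getPos() and sync() on a stream obtained from FileSystem.create();
     the output path is an assumption for the example.

     import java.io.IOException;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.FSDataOutputStream;
     import org.apache.hadoop.fs.FileSystem;
     import org.apache.hadoop.fs.Path;

     public class OutputPosSketch {
       public static void main(String[] args) throws IOException {
         FileSystem fs = FileSystem.get(new Configuration());
         FSDataOutputStream out = fs.create(new Path("/tmp/out.dat"));  // illustrative path
         out.writeBytes("hello");
         out.sync();                                   // push buffered bytes to the filesystem
         System.out.println("pos = " + out.getPos()); // bytes written so far
         out.close();
       }
     }
-->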
|
|
<!-- start class org.apache.hadoop.fs.FSError -->
|
|
<class name="FSError" extends="java.lang.Error"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Thrown for unexpected filesystem errors, presumed to reflect disk errors
|
|
in the native filesystem.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FSError -->
|
|
<!-- start class org.apache.hadoop.fs.FSInputChecker -->
|
|
<class name="FSInputChecker" extends="org.apache.hadoop.fs.FSInputStream"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FSInputChecker" type="org.apache.hadoop.fs.Path, int"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor
|
|
|
|
@param file The name of the file to be read
|
|
@param numOfRetries Number of read retries when ChecksumError occurs]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="FSInputChecker" type="org.apache.hadoop.fs.Path, int, boolean, java.util.zip.Checksum, int, int"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor
|
|
|
|
@param file The name of the file to be read
|
|
@param numOfRetries Number of read retries when ChecksumError occurs
|
|
@param sum the type of Checksum engine
|
|
@param chunkSize maximum chunk size
|
|
@param checksumSize the number of bytes in each checksum]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="readChunk" return="int"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<param name="buf" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<param name="checksum" type="byte[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads in next checksum chunk data into <code>buf</code> at <code>offset</code>
|
|
and checksum into <code>checksum</code>.
|
|
The method is used for implementing read, therefore, it should be optimized
|
|
for sequential reading
|
|
@param pos chunkPos
|
|
@param buf destination buffer
|
|
@param offset offset in buf at which to store data
|
|
@param len maximum number of bytes to read
|
|
@return number of bytes read]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getChunkPosition" return="long"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<doc>
|
|
<![CDATA[Return position of beginning of chunk containing pos.
|
|
|
|
@param pos a position in the file
|
|
@return the starting position of the chunk which contains the byte]]>
|
|
</doc>
|
|
</method>
|
|
<method name="needChecksum" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return true if there is a need for checksum verification]]>
|
|
</doc>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read one checksum-verified byte
|
|
|
|
@return the next byte of data, or <code>-1</code> if the end of the
|
|
stream is reached.
|
|
@exception IOException if an I/O error occurs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read checksum verified bytes from this byte-input stream into
|
|
the specified byte array, starting at the given offset.
|
|
|
|
<p> This method implements the general contract of the corresponding
|
|
<code>{@link InputStream#read(byte[], int, int) read}</code> method of
|
|
the <code>{@link InputStream}</code> class. As an additional
|
|
convenience, it attempts to read as many bytes as possible by repeatedly
|
|
invoking the <code>read</code> method of the underlying stream. This
|
|
iterated <code>read</code> continues until one of the following
|
|
conditions becomes true: <ul>
|
|
|
|
<li> The specified number of bytes have been read,
|
|
|
|
<li> The <code>read</code> method of the underlying stream returns
|
|
<code>-1</code>, indicating end-of-file.
|
|
|
|
</ul> If the first <code>read</code> on the underlying stream returns
|
|
<code>-1</code> to indicate end-of-file then this method returns
|
|
<code>-1</code>. Otherwise this method returns the number of bytes
|
|
actually read.
|
|
|
|
@param b destination buffer.
|
|
@param off offset at which to start storing bytes.
|
|
@param len maximum number of bytes to read.
|
|
@return the number of bytes read, or <code>-1</code> if the end of
|
|
the stream has been reached.
|
|
@exception IOException if an I/O error occurs.
|
|
ChecksumException if any checksum error occurs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checksum2long" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="checksum" type="byte[]"/>
|
|
<doc>
|
|
<![CDATA[Convert a checksum byte array to a long]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="available" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="skip" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Skips over and discards <code>n</code> bytes of data from the
|
|
input stream.
|
|
|
|
<p>This method may skip more bytes than are remaining in the backing
|
|
file. This produces no exception and the number of bytes skipped
|
|
may include some number of bytes that were beyond the EOF of the
|
|
backing file. Attempting to read from the stream after skipping past
|
|
the end will result in -1 indicating the end of the file.
|
|
|
|
<p>If <code>n</code> is negative, no bytes are skipped.
|
|
|
|
@param n the number of bytes to be skipped.
|
|
@return the actual number of bytes skipped.
|
|
@exception IOException if an I/O error occurs.
|
|
ChecksumException if the chunk to skip to is corrupted]]>
|
|
</doc>
|
|
</method>
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Seek to the given position in the stream.
|
|
The next read() will be from that position.
|
|
|
|
<p>This method may seek past the end of the file.
|
|
This produces no exception and an attempt to read from
|
|
the stream will result in -1 indicating the end of the file.
|
|
|
|
@param pos the position to seek to.
|
|
@exception IOException if an I/O error occurs.
|
|
ChecksumException if the chunk to seek to is corrupted]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFully" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="stm" type="java.io.InputStream"/>
|
|
<param name="buf" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[A utility function that tries to read up to <code>len</code> bytes from
|
|
<code>stm</code>
|
|
|
|
@param stm an input stream
|
|
@param buf destination buffer
|
|
@param offset offset at which to store data
|
|
@param len number of bytes to read
|
|
@return actual number of bytes read
|
|
@throws IOException if there is any IO error]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="sum" type="java.util.zip.Checksum"/>
|
|
<param name="maxChunkSize" type="int"/>
|
|
<param name="checksumSize" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the checksum related parameters
|
|
@param sum which type of checksum to use
|
|
@param maxChunkSize maximum chunk size
|
|
@param checksumSize checksum size]]>
|
|
</doc>
|
|
</method>
|
|
<method name="markSupported" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="mark"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="readlimit" type="int"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="file" type="org.apache.hadoop.fs.Path"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The file name from which data is read]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This is a generic input stream for verifying checksums for
|
|
data before it is read by a user.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FSInputChecker -->
|
|
<!-- start class org.apache.hadoop.fs.FSInputStream -->
|
|
<class name="FSInputStream" extends="java.io.InputStream"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.fs.Seekable"/>
|
|
<implements name="org.apache.hadoop.fs.PositionedReadable"/>
|
|
<constructor name="FSInputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="seek"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Seek to the given offset from the start of the file.
|
|
The next read() will be from that location. Can't
|
|
seek past the end of the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the current offset from the start of the file]]>
|
|
</doc>
|
|
</method>
|
|
<method name="seekToNewSource" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="targetPos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Seeks a different copy of the data. Returns true if
|
|
found a new source, false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[FSInputStream is a generic old InputStream with a little bit
|
|
of RAF-style seek ability.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FSInputStream -->
|
|
<!-- start class org.apache.hadoop.fs.FSOutputSummer -->
|
|
<class name="FSOutputSummer" extends="java.io.OutputStream"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FSOutputSummer" type="java.util.zip.Checksum, int, int"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="writeChunk"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<param name="checksum" type="byte[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write one byte]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes <code>len</code> bytes from the specified byte array
|
|
starting at offset <code>off</code> and generate a checksum for
|
|
each data chunk.
|
|
|
|
<p> This method stores bytes from the given array into this
|
|
stream's buffer before it gets checksummed. The buffer gets checksummed
|
|
and flushed to the underlying output stream when all data
|
|
in a checksum chunk are in the buffer. If the buffer is empty and
|
|
requested length is at least as large as the next checksum chunk
|
|
size, this method will checksum and write the chunk directly
|
|
to the underlying output stream. Thus it avoids an unnecessary data copy.
|
|
|
|
@param b the data.
|
|
@param off the start offset in the data.
|
|
@param len the number of bytes to write.
|
|
@exception IOException if an I/O error occurs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="flushBuffer"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="flushBuffer"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="keep" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="convertToByteStream" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="sum" type="java.util.zip.Checksum"/>
|
|
<param name="checksumSize" type="int"/>
|
|
<doc>
|
|
<![CDATA[Converts a checksum integer value to a byte stream]]>
|
|
</doc>
|
|
</method>
|
|
<method name="resetChecksumChunk"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="size" type="int"/>
|
|
<doc>
|
|
<![CDATA[Resets existing buffer with a new one of the specified size.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This is a generic output stream for generating checksums for
|
|
data before it is written to the underlying stream]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FSOutputSummer -->
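<!-- Sketch of a concrete FSOutputSummer, per the constructor and abstract writeChunk()
     above: each 512-byte chunk is checksummed with CRC32 and written, followed by its
     4-byte checksum, to an arbitrary underlying stream. The chunk layout and sizes are
     assumptions for the example, not the layout any Hadoop stream actually uses.

     import java.io.IOException;
     import java.io.OutputStream;
     import java.util.zip.CRC32;
     import org.apache.hadoop.fs.FSOutputSummer;

     public class CrcSummerSketch extends FSOutputSummer {
       private final OutputStream out;

       public CrcSummerSketch(OutputStream out) {
         super(new CRC32(), 512, 4);   // checksum engine, bytes per chunk, checksum size
         this.out = out;
       }

       protected void writeChunk(byte[] b, int offset, int len, byte[] checksum)
           throws IOException {
         out.write(b, offset, len);    // data chunk
         out.write(checksum);          // its checksum, as produced by the base class
       }
     }
-->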
|
|
<!-- start class org.apache.hadoop.fs.FsShell -->
|
|
<class name="FsShell" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="FsShell"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FsShell" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="init"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getCurrentTrashDir" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the current trash directory associated with this shell.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="byteDesc" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="len" type="long"/>
|
|
<doc>
|
|
<![CDATA[Return an abbreviated English-language description of the byte length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="limitDecimalTo2" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="d" type="double"/>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[run]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[main() has some simple utility methods]]>
|
|
</doc>
|
|
</method>
|
|
<field name="fs" type="org.apache.hadoop.fs.FileSystem"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="dateForm" type="java.text.SimpleDateFormat"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="modifFmt" type="java.text.SimpleDateFormat"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Provide command line access to a FileSystem.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FsShell -->
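<!-- Sketch of driving FsShell programmatically; since it implements Tool (see above),
     it can be run through ToolRunner. The "-ls /" arguments are illustrative.

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.FsShell;
     import org.apache.hadoop.util.ToolRunner;

     public class ShellSketch {
       public static void main(String[] args) throws Exception {
         // Roughly what "hadoop fs -ls /" does from the command line.
         int rc = ToolRunner.run(new Configuration(), new FsShell(), new String[] {"-ls", "/"});
         System.exit(rc);
       }
     }
-->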
|
|
<!-- start class org.apache.hadoop.fs.FsUrlStreamHandlerFactory -->
|
|
<class name="FsUrlStreamHandlerFactory" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.net.URLStreamHandlerFactory"/>
|
|
<constructor name="FsUrlStreamHandlerFactory"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FsUrlStreamHandlerFactory" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="createURLStreamHandler" return="java.net.URLStreamHandler"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Factory for URL stream handlers.
|
|
|
|
There is only one handler whose job is to create UrlConnections. A
|
|
FsUrlConnection relies on FileSystem to choose the appropriate FS
|
|
implementation.
|
|
|
|
Before returning our handler, we make sure that FileSystem knows an
|
|
implementation for the requested scheme/protocol.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.FsUrlStreamHandlerFactory -->
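<!-- Sketch of registering the factory so java.net.URL can open filesystem URLs, as
     described above; the hostname, port, and path are assumptions. Note that
     URL.setURLStreamHandlerFactory may be called at most once per JVM.

     import java.io.InputStream;
     import java.net.URL;
     import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

     public class UrlHandlerSketch {
       public static void main(String[] args) throws Exception {
         URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
         // After registration, any scheme FileSystem knows about can be opened as a URL.
         InputStream in = new URL("hdfs://namenode:9000/tmp/example.txt").openStream();
         in.close();
       }
     }
-->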
|
|
<!-- start class org.apache.hadoop.fs.HarFileSystem -->
|
|
<class name="HarFileSystem" extends="org.apache.hadoop.fs.FilterFileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="HarFileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Public constructor for HarFileSystem.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="HarFileSystem" type="org.apache.hadoop.fs.FileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor to create a HarFileSystem with an
|
|
underlying filesystem.
|
|
@param fs]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Initialize a Har filesystem per har archive. The
|
|
archive home directory is the top level directory
|
|
in the filesystem that contains the HAR archive.
|
|
Be careful with this method; you do not want to go
|
|
on creating new Filesystem instances per call to
|
|
path.getFileSystem().
|
|
the uri of Har is
|
|
har://underlyingfsscheme-host:port/archivepath,
|
|
or
|
|
har:///archivepath, in which case the configured default filesystem
|
|
is used as the underlying filesystem.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHarVersion" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[return the top level archive.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the uri of this filesystem.
|
|
The uri is of the form
|
|
har://underlyingfsscheme-host:port/pathintheunderlyingfs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeQualified" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<method name="getFileBlockLocations" return="org.apache.hadoop.fs.BlockLocation[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.FileStatus"/>
|
|
<param name="start" type="long"/>
|
|
<param name="len" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[get block locations from the underlying fs
|
|
@param file the input filestatus to get block locations
|
|
@param start the start in the file
|
|
@param len the length in the file
|
|
@return block locations for this segment of file
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHarHash" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[The hash of the path p inside
|
|
the filesystem
|
|
@param p the path in the harfilesystem
|
|
@return the hash code of the path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[return the filestatus of files in har archive.
|
|
The permissions returned are those of the archive
|
|
index files. The permissions are not persisted
|
|
while creating a hadoop archive.
|
|
@param f the path in har filesystem
|
|
@return filestatus.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns a har input stream which fakes end of
|
|
file. It reads the index files to get the part
|
|
file name and the size and start of the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setReplication" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="replication" type="short"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[liststatus returns the children of a directory
|
|
after looking up the index files.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHomeDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[return the top level archive path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newDir" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[copies the file in the har filesystem to a local file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startLocalOutput" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="completeLocalOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOwner"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="username" type="java.lang.String"/>
|
|
<param name="groupname" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permisssion" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Not implemented.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="VERSION" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This is an implementation of the Hadoop Archive
|
|
Filesystem. This archive Filesystem has index files
|
|
of the form _index* and has contents of the form
|
|
part-*. The index files store the indexes of the
|
|
real files. The index files are of the form _masterindex
|
|
and _index. The master index is a level of indirection
|
|
into the index file to make the lookups faster. The index
|
|
file is sorted by the hash codes of the paths that it contains
|
|
and the master index contains pointers to the positions in
|
|
index for ranges of hashcodes.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.HarFileSystem -->
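<!-- Sketch of reading a file stored inside a Hadoop archive through the har scheme
     described above. The archive location and member file name are illustrative, and
     the har scheme is assumed to be mapped to HarFileSystem in the configuration.

     import java.io.IOException;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.FSDataInputStream;
     import org.apache.hadoop.fs.FileSystem;
     import org.apache.hadoop.fs.Path;

     public class HarReadSketch {
       public static void main(String[] args) throws IOException {
         Configuration conf = new Configuration();
         // har:///... rides on the configured default filesystem (see initialize above).
         Path member = new Path("har:///user/foo/data.har/part-of-archive.txt");
         FileSystem harFs = member.getFileSystem(conf);
         FSDataInputStream in = harFs.open(member);
         in.close();
       }
     }
-->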
|
|
<!-- start class org.apache.hadoop.fs.InMemoryFileSystem -->
|
|
<class name="InMemoryFileSystem" extends="org.apache.hadoop.fs.ChecksumFileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="InMemoryFileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="InMemoryFileSystem" type="java.net.URI, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="reserveSpaceWithCheckSum" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="size" type="long"/>
|
|
<doc>
|
|
<![CDATA[Register a file with its size. This will also register a checksum for the
|
|
file that the user is trying to create. This is required since none of
|
|
the FileSystem APIs accept the size of the file as argument. But since it
|
|
is required for us to know a priori the size of the file we are going to
|
|
create, the user must call this method for each file he wants to create
|
|
and reserve memory for that file. We either succeed in reserving memory
|
|
for both the main file and the checksum file and return true, or return
|
|
false.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFiles" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="filter" type="org.apache.hadoop.fs.PathFilter"/>
|
|
</method>
|
|
<method name="getNumFiles" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="filter" type="org.apache.hadoop.fs.PathFilter"/>
|
|
</method>
|
|
<method name="getFSSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getPercentUsed" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An implementation of the in-memory filesystem. This implementation assumes
|
|
that the file lengths are known ahead of time and the total length of all
|
|
the files is below a certain number (like 100 MB, configurable). Use the API
|
|
reserveSpaceWithCheckSum(Path f, long size) (see below for a description)
|
|
to reserve space in the FS. The uri of this filesystem starts with
|
|
ramfs:// .]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.InMemoryFileSystem -->
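<!-- Sketch of the reserve-then-create pattern required by reserveSpaceWithCheckSum()
     above; the ramfs URI, path, and size are assumptions for the example.

     import java.io.IOException;
     import java.net.URI;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.FSDataOutputStream;
     import org.apache.hadoop.fs.InMemoryFileSystem;
     import org.apache.hadoop.fs.Path;

     public class RamFsSketch {
       public static void main(String[] args) throws IOException {
         Configuration conf = new Configuration();
         InMemoryFileSystem ramFs = new InMemoryFileSystem(URI.create("ramfs://mem"), conf);
         Path p = new Path("/scratch/block-0");
         long size = 1 << 20;                           // size must be known up front
         if (ramFs.reserveSpaceWithCheckSum(p, size)) { // reserves data plus checksum space
           FSDataOutputStream out = ramFs.create(p);
           out.write(new byte[(int) size]);
           out.close();
         }
       }
     }
-->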
|
|
<!-- start class org.apache.hadoop.fs.LocalDirAllocator -->
|
|
<class name="LocalDirAllocator" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LocalDirAllocator" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create an allocator object
|
|
@param contextCfgItemName]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getLocalPathForWrite" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathStr" type="java.lang.String"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get a path from the local FS. This method should be used if the size of
|
|
the file is not known a priori. We go round-robin over the set of disks
|
|
(via the configured dirs) and return the first complete path where
|
|
we could create the parent directory of the passed path.
|
|
@param pathStr the requested path (this will be created on the first
|
|
available disk)
|
|
@param conf the Configuration object
|
|
@return the complete path to the file on a local disk
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalPathForWrite" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathStr" type="java.lang.String"/>
|
|
<param name="size" type="long"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
<![CDATA[Get a path from the local FS. Pass size as -1 if it is not known a priori.
 We round-robin over the set of disks (via the configured dirs) and return
 the first complete path which has enough space.
 @param pathStr the requested path (this will be created on the first
 available disk)
 @param size the size of the file that is going to be written
 @param conf the Configuration object
 @return the complete path to the file on a local disk
 @throws IOException]]>
</doc>
|
|
</method>
|
|
<method name="getLocalPathToRead" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathStr" type="java.lang.String"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get a path from the local FS for reading. We search through all the
|
|
configured dirs for the file's existence and return the complete
|
|
path to the file when we find one
|
|
@param pathStr the requested file (this will be searched)
|
|
@param conf the Configuration object
|
|
@return the complete path to the file on a local disk
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createTmpFileForWrite" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathStr" type="java.lang.String"/>
|
|
<param name="size" type="long"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
<![CDATA[Creates a temporary file in the local FS. Pass size as -1 if it is not
 known a priori. We round-robin over the set of disks (via the configured dirs)
 and select the first complete path which has enough space. A file is
 created in this directory. The file is guaranteed to go away when the
 JVM exits.
 @param pathStr prefix for the temporary file
 @param size the size of the file that is going to be written
 @param conf the Configuration object
 @return a unique temporary file
 @throws IOException]]>
</doc>
|
|
</method>
|
|
<method name="isContextValid" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextCfgItemName" type="java.lang.String"/>
|
|
<doc>
<![CDATA[Check whether a context is valid.
 @param contextCfgItemName the configuration item naming the context
 @return true if the context is valid, false otherwise]]>
</doc>
|
|
</method>
|
|
<method name="ifExists" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathStr" type="java.lang.String"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
<![CDATA[We search through all the configured dirs for the file's existence
 and return true when we find it.
 @param pathStr the requested file (this will be searched)
 @param conf the Configuration object
 @return true if the file exists, false otherwise
 @throws IOException]]>
</doc>
|
|
</method>
|
|
<doc>
<![CDATA[An implementation of a round-robin scheme for disk allocation when creating
 files. It keeps track of which disk was last allocated for a file write. For
 the current request, the next disk in the set is allocated if its free space
 is sufficient to accommodate the file being created. If the space requirement
 cannot be met, the next disk in order is tried, and so on, until a disk with
 sufficient capacity is found. Once a disk with sufficient space is identified,
 a check is done to make sure that the disk is writable. There is also an API
 that does not take the space requirements into consideration but just checks
 whether the disk under consideration is writable (this should be used for
 cases where the file size is not known a priori). An API is provided to read
 a path that was created earlier; it works by scanning all the disks for the
 input pathname.
 This implementation also provides the functionality of having multiple
 allocators per JVM (one for each unique functionality or context, such as
 mapred, dfs-client, etc.). It ensures that there is only one instance of
 an allocator per context per JVM.
 Note:
 1. The contexts referred to above are the configuration items defined
 in the Configuration class, such as "mapred.local.dir" (for which we want to
 control the dir allocations). The context strings are exactly those
 configuration items.
 2. This implementation does not take into consideration cases where
 a disk becomes read-only or runs out of space while a file is being written
 (disks are shared between multiple processes, so the latter situation
 is probable).
 3. In the class implementation, "Disk" is referred to as "Dir", which
 actually points to the configured directory on the disk that will be the
 parent for all file write/read allocations.]]>
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.LocalDirAllocator -->
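<!-- Illustrative usage sketch (not part of the generated API data): creating a
     LocalDirAllocator for the "mapred.local.dir" context and asking it for
     write, temporary, and read paths. The relative path names and sizes are
     assumptions for illustration only.

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;

public class LocalDirAllocatorExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // The context is a configuration item listing local directories,
    // e.g. "mapred.local.dir" (see note 1 in the class description).
    LocalDirAllocator allocator = new LocalDirAllocator("mapred.local.dir");

    // Round-robin over the configured dirs; the call fails if no dir has
    // 10 MB of free space.
    Path out = allocator.getLocalPathForWrite("job_1/spill0.out",
                                              10L * 1024L * 1024L, conf);

    // Temporary file deleted on JVM exit; size -1 means "not known a priori".
    File tmp = allocator.createTmpFileForWrite("job_1/tmp", -1L, conf);

    // Later, locate a previously written path by scanning the configured dirs.
    Path in = allocator.getLocalPathToRead("job_1/spill0.out", conf);
    System.out.println(out + " " + tmp + " " + in);
  }
}
-->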
|
|
<!-- start class org.apache.hadoop.fs.LocalFileSystem -->
|
|
<class name="LocalFileSystem" extends="org.apache.hadoop.fs.ChecksumFileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LocalFileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="LocalFileSystem" type="org.apache.hadoop.fs.FileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRaw" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="pathToFile" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Convert a path to a File.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reportChecksumFailure" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="in" type="org.apache.hadoop.fs.FSDataInputStream"/>
|
|
<param name="inPos" type="long"/>
|
|
<param name="sums" type="org.apache.hadoop.fs.FSDataInputStream"/>
|
|
<param name="sumsPos" type="long"/>
|
|
<doc>
|
|
<![CDATA[Moves files to a bad file directory on the same device, so that their
|
|
storage will not be reused.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
<![CDATA[Implement the FileSystem API for the checksummed local filesystem.]]>
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.LocalFileSystem -->
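<!-- Illustrative usage sketch (not part of the generated API data): obtaining
     the checksummed local filesystem, copying a file, and dropping down to the
     raw (non-checksummed) filesystem. The /tmp paths are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;

public class LocalFsExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Checksummed local filesystem; .crc side files are maintained for writes.
    LocalFileSystem localFs = FileSystem.getLocal(conf);

    Path src = new Path("/tmp/input.txt");          // placeholder paths
    Path dst = new Path("/tmp/copy-of-input.txt");
    localFs.copyFromLocalFile(false, src, dst);     // delSrc = false keeps the source

    // The raw filesystem underneath skips checksum generation and verification.
    FileSystem rawFs = localFs.getRaw();
    System.out.println("Copied to " + localFs.pathToFile(dst)
        + ", raw fs: " + rawFs.getUri());
  }
}
-->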
|
|
<!-- start class org.apache.hadoop.fs.MD5MD5CRC32FileChecksum -->
|
|
<class name="MD5MD5CRC32FileChecksum" extends="org.apache.hadoop.fs.FileChecksum"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MD5MD5CRC32FileChecksum"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Same as this(0, 0, null)]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MD5MD5CRC32FileChecksum" type="int, long, org.apache.hadoop.io.MD5Hash"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
<![CDATA[Create an MD5MD5CRC32FileChecksum.]]>
</doc>
|
|
</constructor>
|
|
<method name="getAlgorithmName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytes" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="xml" type="org.znerd.xmlenc.XMLOutputter"/>
|
|
<param name="that" type="org.apache.hadoop.fs.MD5MD5CRC32FileChecksum"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write that object to xml output.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.fs.MD5MD5CRC32FileChecksum"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="attrs" type="org.xml.sax.Attributes"/>
|
|
<exception name="SAXException" type="org.xml.sax.SAXException"/>
|
|
<doc>
|
|
<![CDATA[Return the object represented in the attributes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LENGTH" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[MD5 of MD5 of CRC32.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.MD5MD5CRC32FileChecksum -->
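<!-- Illustrative usage sketch (not part of the generated API data): retrieving
     a file checksum through the FileSystem API and inspecting it. It assumes
     FileSystem.getFileChecksum(Path) is available and that the target
     filesystem (HDFS, here) returns an MD5-of-MD5-of-CRC32 checksum; the
     namenode URI and path are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileChecksumExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path file = new Path("hdfs://namenode:9000/user/alice/data.txt"); // placeholder
    FileSystem fs = file.getFileSystem(conf);

    // Some filesystems return null when checksums are not supported.
    FileChecksum checksum = fs.getFileChecksum(file);
    if (checksum != null) {
      System.out.println("algorithm = " + checksum.getAlgorithmName());
      System.out.println("length    = " + checksum.getLength() + " bytes");
      System.out.println("value     = " + checksum);
    }
  }
}
-->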
|
|
<!-- start class org.apache.hadoop.fs.Path -->
|
|
<class name="Path" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Comparable"/>
|
|
<constructor name="Path" type="java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resolve a child path against a parent path.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Path" type="org.apache.hadoop.fs.Path, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resolve a child path against a parent path.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Path" type="java.lang.String, org.apache.hadoop.fs.Path"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resolve a child path against a parent path.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Path" type="org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resolve a child path against a parent path.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Path" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a path from a String. Path strings are URIs, but with
|
|
unescaped elements and some additional normalization.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Path" type="java.lang.String, java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a Path from components.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="toUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Convert this to a URI.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFileSystem" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the FileSystem that owns this Path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isAbsolute" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[True if the directory of this path is absolute.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the final component of this path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getParent" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the parent of a path or null if at root.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="suffix" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="suffix" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Adds a suffix to the final name in the path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="depth" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the number of elements in this path.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeQualified" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<doc>
|
|
<![CDATA[Returns a qualified path object.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="SEPARATOR" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The directory separator, a slash.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="SEPARATOR_CHAR" type="char"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="CUR_DIR" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Names a file or directory in a {@link FileSystem}.
|
|
Path strings use slash as the directory separator. A path string is
|
|
absolute if it begins with a slash.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.Path -->
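<!-- Illustrative usage sketch (not part of the generated API data): building
     paths, resolving a child against a parent, and qualifying a path against
     the default filesystem. The concrete path strings are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PathExample {
  public static void main(String[] args) throws Exception {
    // Resolve a child path against a parent path.
    Path dir = new Path("/user/alice");            // placeholder path
    Path file = new Path(dir, "logs/app.log");     // /user/alice/logs/app.log

    System.out.println(file.getName());       // app.log
    System.out.println(file.getParent());     // /user/alice/logs
    System.out.println(file.depth());         // 4 elements in the path
    System.out.println(file.isAbsolute());    // true, starts with a slash
    System.out.println(file.suffix(".bak"));  // /user/alice/logs/app.log.bak

    // Qualify the path with the default filesystem's scheme and authority.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    System.out.println(file.makeQualified(fs));
  }
}
-->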
|
|
<!-- start interface org.apache.hadoop.fs.PathFilter -->
|
|
<interface name="PathFilter" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
<![CDATA[Tests whether or not the specified abstract pathname should be
 included in a pathname list.

 @param path The abstract pathname to be tested
 @return <code>true</code> if and only if <code>path</code>
 should be included]]>
</doc>
|
|
</method>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.fs.PathFilter -->
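<!-- Illustrative usage sketch (not part of the generated API data): an
     anonymous PathFilter that accepts only ".log" files, passed to
     FileSystem.listStatus(Path, PathFilter). The directory name is a
     placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

public class PathFilterExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // Accept only paths whose final component ends in ".log".
    PathFilter logFilter = new PathFilter() {
      public boolean accept(Path path) {
        return path.getName().endsWith(".log");
      }
    };

    // listStatus applies the filter to each entry in the directory.
    FileStatus[] logs = fs.listStatus(new Path("/var/app"), logFilter);
    for (FileStatus status : logs) {
      System.out.println(status.getPath());
    }
  }
}
-->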
|
|
<!-- start interface org.apache.hadoop.fs.PositionedReadable -->
|
|
<interface name="PositionedReadable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
<![CDATA[Read up to the specified number of bytes, from a given
 position within a file, and return the number of bytes read. This does not
 change the current offset of a file, and is thread-safe.]]>
</doc>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the specified number of bytes, from a given
|
|
position within a file. This does not
|
|
change the current offset of a file, and is thread-safe.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<param name="buffer" type="byte[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
<![CDATA[Read a number of bytes equal to the length of the buffer, from a given
 position within a file. This does not
 change the current offset of a file, and is thread-safe.]]>
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Stream that permits positional reading.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.fs.PositionedReadable -->
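<!-- Illustrative usage sketch (not part of the generated API data): positional
     reads through FSDataInputStream, which implements PositionedReadable. The
     file path and offsets are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PositionedReadExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    FSDataInputStream in = fs.open(new Path("/data/records.bin")); // placeholder

    try {
      byte[] buf = new byte[4096];
      // Read up to 4096 bytes starting at byte offset 1000000 without moving
      // the stream's current position; safe to call from multiple threads.
      int n = in.read(1000000L, buf, 0, buf.length);
      System.out.println("read " + n + " bytes");

      // readFully fails with an exception if the full length cannot be read.
      in.readFully(0L, buf, 0, 512);
    } finally {
      in.close();
    }
  }
}
-->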
|
|
<!-- start class org.apache.hadoop.fs.RawLocalFileSystem -->
|
|
<class name="RawLocalFileSystem" extends="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RawLocalFileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="pathToFile" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Convert a path to a File.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="deprecated, no comment">
|
|
<doc>
|
|
<![CDATA[@deprecated]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Creates the specified directory hierarchy. Does not
|
|
treat existence as an error.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHomeDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newDir" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the working directory to the given directory.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="lock"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="deprecated, no comment">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="shared" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated]]>
|
|
</doc>
|
|
</method>
|
|
<method name="release"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="deprecated, no comment">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated]]>
|
|
</doc>
|
|
</method>
|
|
<method name="moveFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startLocalOutput" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="completeLocalOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsWorkingFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setOwner"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="username" type="java.lang.String"/>
|
|
<param name="groupname" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Use the command chown to set owner.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Use the command chmod to set permission.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Implement the FileSystem API for the raw local filesystem.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.RawLocalFileSystem -->
|
|
<!-- start interface org.apache.hadoop.fs.Seekable -->
|
|
<interface name="Seekable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Seek to the given offset from the start of the file.
|
|
The next read() will be from that location. Can't
|
|
seek past the end of the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the current offset from the start of the file]]>
|
|
</doc>
|
|
</method>
|
|
<method name="seekToNewSource" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="targetPos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Seeks a different copy of the data. Returns true if
|
|
found a new source, false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Stream that permits seeking.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.fs.Seekable -->
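<!-- Illustrative usage sketch (not part of the generated API data): seeking
     within an open FSDataInputStream, which implements Seekable. The file path
     and offset are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SeekExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    FSDataInputStream in = fs.open(new Path("/data/records.bin")); // placeholder
    try {
      in.seek(128L);                       // position the stream at byte 128
      int firstByteAfterSeek = in.read();  // next read starts at that offset
      System.out.println("pos=" + in.getPos() + " byte=" + firstByteAfterSeek);
    } finally {
      in.close();
    }
  }
}
-->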
|
|
<!-- start interface org.apache.hadoop.fs.Syncable -->
|
|
<interface name="Syncable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="sync"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
<![CDATA[Synchronize all buffers with the underlying devices.
 @throws IOException]]>
</doc>
</method>
<doc>
<![CDATA[This interface declares the sync() operation.]]>
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.fs.Syncable -->
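<!-- Illustrative usage sketch (not part of the generated API data): calling
     sync() on an output stream. It assumes the concrete stream returned by
     FileSystem.create() supports Syncable for the filesystem in use; the log
     path and record contents are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SyncExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    FSDataOutputStream out = fs.create(new Path("/logs/events.log")); // placeholder
    try {
      out.write("event-1\n".getBytes("UTF-8"));
      // Push buffered bytes down to the underlying devices without closing
      // the stream, so the data survives a crash of this writer.
      out.sync();
      out.write("event-2\n".getBytes("UTF-8"));
    } finally {
      out.close();
    }
  }
}
-->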
|
|
<!-- start class org.apache.hadoop.fs.Trash -->
|
|
<class name="Trash" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Trash" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a trash can accessor.
|
|
@param conf a Configuration]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Trash" type="org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a trash can accessor for the FileSystem provided.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="moveToTrash" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Move a file or directory to the current trash directory.
|
|
@return false if the item is already in the trash or trash is disabled]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checkpoint"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a trash checkpoint.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="expunge"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Delete old checkpoints.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getEmptier" return="java.lang.Runnable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return a {@link Runnable} that periodically empties the trash of all
|
|
users, intended to be run by the superuser. Only one checkpoint is kept
|
|
at a time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[Run an emptier.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Provides a <i>trash</i> feature. Files are moved to a user's trash
|
|
directory, a subdirectory of their home directory named ".Trash". Files are
|
|
initially moved to a <i>current</i> sub-directory of the trash directory.
|
|
Within that sub-directory their original path is preserved. Periodically
|
|
one may checkpoint the current trash and remove older checkpoints. (This
|
|
design permits trash management without enumeration of the full trash
|
|
content, without date support in the filesystem, and without clock
|
|
synchronization.)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.Trash -->
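<!-- Illustrative usage sketch (not part of the generated API data): moving a
     path to the trash, then checkpointing and expunging old checkpoints. The
     path being trashed is a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public class TrashExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Trash trash = new Trash(conf);

    // Returns false if trash is disabled or the path is already in the trash.
    boolean moved = trash.moveToTrash(new Path("/user/alice/old-data")); // placeholder
    System.out.println("moved to trash: " + moved);

    trash.checkpoint();  // snapshot the current trash contents
    trash.expunge();     // delete old checkpoints
  }
}
-->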
|
|
</package>
|
|
<package name="org.apache.hadoop.fs.ftp">
|
|
<!-- start class org.apache.hadoop.fs.ftp.FTPException -->
|
|
<class name="FTPException" extends="java.lang.RuntimeException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FTPException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FTPException" type="java.lang.Throwable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FTPException" type="java.lang.String, java.lang.Throwable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
<![CDATA[A class to wrap a {@link Throwable} into a runtime exception.]]>
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.ftp.FTPException -->
|
|
<!-- start class org.apache.hadoop.fs.ftp.FTPFileSystem -->
|
|
<class name="FTPFileSystem" extends="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FTPFileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[A stream obtained via this call must be closed before using other APIs of
|
|
this class or else the invocation will block.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This optional operation is not yet supported.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use delete(Path, boolean) instead">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Use delete(Path, boolean) instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getHomeDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newDir" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DEFAULT_BUFFER_SIZE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DEFAULT_BLOCK_SIZE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A {@link FileSystem} backed by an FTP client provided by <a
|
|
href="http://commons.apache.org/net/">Apache Commons Net</a>.
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.ftp.FTPFileSystem -->
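<!-- Illustrative usage sketch (not part of the generated API data): listing a
     directory on an FTP server through the FileSystem API. It assumes the
     configuration maps the ftp:// scheme to FTPFileSystem and that the host
     and credentials can be supplied in the URI; the host name, credentials,
     and paths are placeholders.

import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FtpFsExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Host and credentials are taken from the URI here; they are placeholders.
    Path dir = new Path("ftp://user:secret@ftp.example.com/pub/data");

    FileSystem ftpFs = dir.getFileSystem(conf);
    for (FileStatus status : ftpFs.listStatus(dir)) {
      System.out.println(status.getPath() + " " + status.getLen());
    }

    // A stream obtained from create() must be closed before calling other
    // methods of this class, otherwise the next invocation blocks (see above).
    InputStream in = ftpFs.open(new Path(dir, "sample.txt"));
    in.close();
  }
}
-->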
|
|
<!-- start class org.apache.hadoop.fs.ftp.FTPInputStream -->
|
|
<class name="FTPInputStream" extends="org.apache.hadoop.fs.FSInputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FTPInputStream" type="java.io.InputStream, org.apache.commons.net.ftp.FTPClient, org.apache.hadoop.fs.FileSystem.Statistics"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="seekToNewSource" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="targetPos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="markSupported" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="mark"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="readLimit" type="int"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.ftp.FTPInputStream -->
|
|
</package>
|
|
<package name="org.apache.hadoop.fs.kfs">
|
|
<!-- start class org.apache.hadoop.fs.kfs.KosmosFileSystem -->
|
|
<class name="KosmosFileSystem" extends="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="KosmosFileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="isDirectory" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="isFile" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This optional operation is not yet supported.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getReplication" return="short"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getDefaultReplication" return="short"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setReplication" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="replication" type="short"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getDefaultBlockSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="lock"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="shared" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="release"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getFileBlockLocations" return="org.apache.hadoop.fs.BlockLocation[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.FileStatus"/>
|
|
<param name="start" type="long"/>
|
|
<param name="len" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return null if the file doesn't exist; otherwise, get the
|
|
locations of the various chunks of the file from KFS.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyFromLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="copyToLocalFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="delSrc" type="boolean"/>
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startLocalOutput" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="completeLocalOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fsOutputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="tmpLocalFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A FileSystem backed by KFS.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.kfs.KosmosFileSystem -->
|
|
</package>
|
|
<package name="org.apache.hadoop.fs.permission">
|
|
<!-- start class org.apache.hadoop.fs.permission.AccessControlException -->
|
|
<class name="AccessControlException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="AccessControlException"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default constructor is needed for unwrapping from
|
|
{@link org.apache.hadoop.ipc.RemoteException}.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="AccessControlException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs an {@link AccessControlException}
|
|
with the specified detail message.
|
|
@param s the detail message.]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.permission.AccessControlException -->
|
|
<!-- start class org.apache.hadoop.fs.permission.FsAction -->
|
|
<class name="FsAction" extends="java.lang.Enum<org.apache.hadoop.fs.permission.FsAction>"
|
|
abstract="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.fs.permission.FsAction[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.fs.permission.FsAction"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<method name="implies" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="that" type="org.apache.hadoop.fs.permission.FsAction"/>
|
|
<doc>
|
|
<![CDATA[Return true if this action implies that action.
|
|
@param that the action to test against this one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="and" return="org.apache.hadoop.fs.permission.FsAction"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="that" type="org.apache.hadoop.fs.permission.FsAction"/>
|
|
<doc>
|
|
<![CDATA[AND operation.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="or" return="org.apache.hadoop.fs.permission.FsAction"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="that" type="org.apache.hadoop.fs.permission.FsAction"/>
|
|
<doc>
|
|
<![CDATA[OR operation.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="not" return="org.apache.hadoop.fs.permission.FsAction"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[NOT operation.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="INDEX" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Octal representation]]>
|
|
</doc>
|
|
</field>
|
|
<field name="SYMBOL" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Symbolic representation]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[File system actions, e.g. read, write, etc.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.permission.FsAction -->
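<!-- A minimal Java sketch of how the FsAction operations listed above compose; the enum
     constants NONE, READ, WRITE, READ_WRITE and ALL are assumed from the standard Hadoop
     values and do not appear in this generated listing.

import org.apache.hadoop.fs.permission.FsAction;

public class FsActionSketch {
  public static void main(String[] args) {
    // implies(): READ_WRITE covers READ.
    System.out.println(FsAction.READ_WRITE.implies(FsAction.READ));                  // true
    // and(): intersection of two actions.
    System.out.println(FsAction.READ_WRITE.and(FsAction.WRITE) == FsAction.WRITE);   // true
    // or(): union of two actions.
    System.out.println(FsAction.READ.or(FsAction.WRITE) == FsAction.READ_WRITE);     // true
    // not(): complement, so the opposite of NONE is ALL.
    System.out.println(FsAction.NONE.not() == FsAction.ALL);                         // true
  }
}
-->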
|
|
<!-- start class org.apache.hadoop.fs.permission.FsPermission -->
|
|
<class name="FsPermission" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="FsPermission" type="org.apache.hadoop.fs.permission.FsAction, org.apache.hadoop.fs.permission.FsAction, org.apache.hadoop.fs.permission.FsAction"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct by the given {@link FsAction}.
|
|
@param u user action
|
|
@param g group action
|
|
@param o other action]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="FsPermission" type="short"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct by the given mode.
|
|
@param mode
|
|
@see #toShort()]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="FsPermission" type="org.apache.hadoop.fs.permission.FsPermission"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Copy constructor
|
|
|
|
@param other other permission]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="createImmutable" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="permission" type="short"/>
|
|
<doc>
|
|
<![CDATA[Create an immutable {@link FsPermission} object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUserAction" return="org.apache.hadoop.fs.permission.FsAction"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return user {@link FsAction}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getGroupAction" return="org.apache.hadoop.fs.permission.FsAction"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return group {@link FsAction}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOtherAction" return="org.apache.hadoop.fs.permission.FsAction"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return other {@link FsAction}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fromShort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="short"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="read" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create and initialize a {@link FsPermission} from {@link DataInput}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toShort" return="short"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Encode the object to a short.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="obj" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="applyUMask" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="umask" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<doc>
|
|
<![CDATA[Apply a umask to this permission and return a new one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUMask" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the user file creation mask (umask)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setUMask"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="umask" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<doc>
|
|
<![CDATA[Set the user file creation mask (umask)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefault" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the default permission.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="unixSymbolicPermission" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Create a FsPermission from a Unix symbolic permission string
|
|
@param unixSymbolicPermission e.g. "-rw-rw-rw-"]]>
|
|
</doc>
|
|
</method>
|
|
<field name="UMASK_LABEL" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[umask property label]]>
|
|
</doc>
|
|
</field>
|
|
<field name="DEFAULT_UMASK" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A class for file/directory permissions.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.permission.FsPermission -->
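<!-- A minimal Java sketch of building FsPermission values and applying a umask, assuming the
     octal short form and the symbolic parser documented above; the concrete modes (0666, 022)
     are illustrative.

import org.apache.hadoop.fs.permission.FsPermission;

public class FsPermissionSketch {
  public static void main(String[] args) {
    // Build a permission from its octal short form (0666 = read/write for user, group and other).
    FsPermission octal = new FsPermission((short) 0666);
    // Parse the same permission from a Unix symbolic string.
    FsPermission symbolic = FsPermission.valueOf("-rw-rw-rw-");
    System.out.println(octal.equals(symbolic));                    // true
    // Apply a umask of 022, which removes write access for group and other.
    FsPermission masked = octal.applyUMask(new FsPermission((short) 022));
    System.out.println(Integer.toOctalString(masked.toShort()));   // 644
  }
}
-->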
|
|
<!-- start class org.apache.hadoop.fs.permission.PermissionStatus -->
|
|
<class name="PermissionStatus" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="PermissionStatus" type="java.lang.String, java.lang.String, org.apache.hadoop.fs.permission.FsPermission"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="createImmutable" return="org.apache.hadoop.fs.permission.PermissionStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="user" type="java.lang.String"/>
|
|
<param name="group" type="java.lang.String"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<doc>
|
|
<![CDATA[Create an immutable {@link PermissionStatus} object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUserName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return user name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getGroupName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return group name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPermission" return="org.apache.hadoop.fs.permission.FsPermission"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return permission]]>
|
|
</doc>
|
|
</method>
|
|
<method name="applyUMask" return="org.apache.hadoop.fs.permission.PermissionStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="umask" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<doc>
|
|
<![CDATA[Apply umask.
|
|
@see FsPermission#applyUMask(FsPermission)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="read" return="org.apache.hadoop.fs.permission.PermissionStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create and initialize a {@link PermissionStatus} from {@link DataInput}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="username" type="java.lang.String"/>
|
|
<param name="groupname" type="java.lang.String"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serialize a {@link PermissionStatus} from its base components.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Store permission related information.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.permission.PermissionStatus -->
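<!-- A sketch of the Writable round trip implied by write, readFields and read above, using
     Hadoop's in-memory DataOutputBuffer and DataInputBuffer; the user and group names are
     placeholders.

import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class PermissionStatusSketch {
  public static void main(String[] args) throws Exception {
    PermissionStatus status =
        new PermissionStatus("alice", "users", new FsPermission((short) 0640));

    // Serialize into an in-memory buffer.
    DataOutputBuffer out = new DataOutputBuffer();
    status.write(out);

    // Read it back with the static factory.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    PermissionStatus copy = PermissionStatus.read(in);

    System.out.println(copy);  // prints user, group and permission
  }
}
-->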
|
|
</package>
|
|
<package name="org.apache.hadoop.fs.s3">
|
|
<!-- start class org.apache.hadoop.fs.s3.Block -->
|
|
<class name="Block" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Block" type="long, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getId" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Holds metadata about a block of data being stored in a {@link FileSystemStore}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.Block -->
|
|
<!-- start interface org.apache.hadoop.fs.s3.FileSystemStore -->
|
|
<interface name="FileSystemStore" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getVersion" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="storeINode"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="inode" type="org.apache.hadoop.fs.s3.INode"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="storeBlock"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="block" type="org.apache.hadoop.fs.s3.Block"/>
|
|
<param name="file" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="inodeExists" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="blockExists" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="blockId" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="retrieveINode" return="org.apache.hadoop.fs.s3.INode"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="retrieveBlock" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="block" type="org.apache.hadoop.fs.s3.Block"/>
|
|
<param name="byteRangeStart" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="deleteINode"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="deleteBlock"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="block" type="org.apache.hadoop.fs.s3.Block"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="listSubPaths" return="java.util.Set<org.apache.hadoop.fs.Path>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="listDeepSubPaths" return="java.util.Set<org.apache.hadoop.fs.Path>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="purge"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Delete everything. Used for testing.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="dump"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Diagnostic method to dump all INodes to the console.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A facility for storing and retrieving {@link INode}s and {@link Block}s.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.fs.s3.FileSystemStore -->
|
|
<!-- start class org.apache.hadoop.fs.s3.INode -->
|
|
<class name="INode" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="INode" type="org.apache.hadoop.fs.s3.INode.FileType, org.apache.hadoop.fs.s3.Block[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getBlocks" return="org.apache.hadoop.fs.s3.Block[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getFileType" return="org.apache.hadoop.fs.s3.INode.FileType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="isDirectory" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="isFile" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getSerializedLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="serialize" return="java.io.InputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="deserialize" return="org.apache.hadoop.fs.s3.INode"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="FILE_TYPES" type="org.apache.hadoop.fs.s3.INode.FileType[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DIRECTORY_INODE" type="org.apache.hadoop.fs.s3.INode"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Holds file metadata including type (regular file, or directory),
|
|
and the list of blocks that are pointers to the data.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.INode -->
|
|
<!-- start class org.apache.hadoop.fs.s3.MigrationTool -->
|
|
<class name="MigrationTool" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="MigrationTool"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
This class is a tool for migrating data from an older to a newer version
|
|
of an S3 filesystem.
|
|
</p>
|
|
<p>
|
|
All files in the filesystem are migrated by re-writing the block metadata
|
|
- no datafiles are touched.
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.MigrationTool -->
|
|
<!-- start class org.apache.hadoop.fs.s3.S3Credentials -->
|
|
<class name="S3Credentials" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="S3Credentials"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[@throws IllegalArgumentException if credentials for S3 cannot be
|
|
determined.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAccessKey" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getSecretAccessKey" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Extracts AWS credentials from the filesystem URI or configuration.
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.S3Credentials -->
|
|
<!-- start class org.apache.hadoop.fs.s3.S3Exception -->
|
|
<class name="S3Exception" extends="java.lang.RuntimeException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="S3Exception" type="java.lang.Throwable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Thrown if there is a problem communicating with Amazon S3.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.S3Exception -->
|
|
<!-- start class org.apache.hadoop.fs.s3.S3FileSystem -->
|
|
<class name="S3FileSystem" extends="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="S3FileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="S3FileSystem" type="org.apache.hadoop.fs.s3.FileSystemStore"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@param permission Currently ignored.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isFile" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This optional operation is not yet supported.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@param permission Currently ignored.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[FileStatus for S3 file systems.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A block-based {@link FileSystem} backed by
|
|
<a href="http://aws.amazon.com/s3">Amazon S3</a>.
|
|
</p>
|
|
@see NativeS3FileSystem]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.S3FileSystem -->
|
|
<!-- start class org.apache.hadoop.fs.s3.S3FileSystemException -->
|
|
<class name="S3FileSystemException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="S3FileSystemException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Thrown when there is a fatal exception while using {@link S3FileSystem}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.S3FileSystemException -->
|
|
<!-- start class org.apache.hadoop.fs.s3.VersionMismatchException -->
|
|
<class name="VersionMismatchException" extends="org.apache.hadoop.fs.s3.S3FileSystemException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="VersionMismatchException" type="java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Thrown when Hadoop cannot read the version of the data stored
|
|
in {@link S3FileSystem}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3.VersionMismatchException -->
|
|
</package>
|
|
<package name="org.apache.hadoop.fs.s3native">
|
|
<!-- start class org.apache.hadoop.fs.s3native.NativeS3FileSystem -->
|
|
<class name="NativeS3FileSystem" extends="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="NativeS3FileSystem"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="NativeS3FileSystem" type="org.apache.hadoop.fs.s3native.NativeFileSystemStore"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.net.URI"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="append" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This optional operation is not yet supported.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<param name="overwrite" type="boolean"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="delete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="recursive" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getFileStatus" return="org.apache.hadoop.fs.FileStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getUri" return="java.net.URI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
If <code>f</code> is a file, this method will make a single call to S3.
|
|
If <code>f</code> is a directory, this method will make a maximum of
|
|
(<i>n</i> / 1000) + 2 calls to S3, where <i>n</i> is the total number of
|
|
files and directories contained directly in <code>f</code>.
|
|
</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mkdirs" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="permission" type="org.apache.hadoop.fs.permission.FsPermission"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="rename" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="dst" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newDir" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the working directory to the given directory.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A {@link FileSystem} for reading and writing files stored on
|
|
<a href="http://aws.amazon.com/s3">Amazon S3</a>.
|
|
Unlike {@link org.apache.hadoop.fs.s3.S3FileSystem} this implementation
|
|
stores files on S3 in their
|
|
native form so they can be read by other S3 tools.
|
|
</p>
|
|
@see org.apache.hadoop.fs.s3.S3FileSystem]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.s3native.NativeS3FileSystem -->
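<!-- A hedged sketch of reaching the native S3 filesystem through the generic FileSystem API;
     the s3n:// scheme and the fs.s3n.awsAccessKeyId / fs.s3n.awsSecretAccessKey configuration
     keys are assumed from the usual Hadoop 0.19 setup (see S3Credentials above), and the bucket
     name and credentials are placeholders.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class NativeS3Sketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Credential keys as usually configured for the native S3 filesystem (assumed).
    conf.set("fs.s3n.awsAccessKeyId", "YOUR_ACCESS_KEY");
    conf.set("fs.s3n.awsSecretAccessKey", "YOUR_SECRET_KEY");

    // "my-bucket" is a placeholder bucket name.
    FileSystem fs = FileSystem.get(URI.create("s3n://my-bucket/"), conf);

    Path p = new Path("/demo/hello.txt");
    FSDataOutputStream out = fs.create(p, true);   // overwrite if present
    out.writeUTF("hello");
    out.close();

    System.out.println(fs.getFileStatus(p).getLen());
    fs.delete(p, false);                           // non-recursive delete
    fs.close();
  }
}
-->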
|
|
</package>
|
|
<package name="org.apache.hadoop.fs.shell">
|
|
<!-- start class org.apache.hadoop.fs.shell.Command -->
|
|
<class name="Command" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Command" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getCommandName" return="java.lang.String"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the command's name, excluding the leading '-' character]]>
|
|
</doc>
|
|
</method>
|
|
<method name="run"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Execute the command on the input path
|
|
|
|
@param path the input path
|
|
@throws IOException if any error occurs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="runAll" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[For each source path, execute the command
|
|
|
|
@return 0 if it runs successfully; -1 if it fails]]>
|
|
</doc>
|
|
</method>
|
|
<field name="args" type="java.lang.String[]"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An abstract class for the execution of a file system command]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.shell.Command -->
|
|
<!-- start class org.apache.hadoop.fs.shell.CommandFormat -->
|
|
<class name="CommandFormat" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="CommandFormat" type="java.lang.String, int, int, java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="parse" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<param name="pos" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse parameters starting from the given position
|
|
|
|
@param args an array of input arguments
|
|
@param pos the position at which to start parsing
|
|
@return a list of parameters]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOpt" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="option" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Return if the option is set or not
|
|
|
|
@param option String representation of an option
|
|
@return true if the option is set; false otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Parse the args of a command and check the format of args.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.shell.CommandFormat -->
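<!-- A small Java sketch of driving the parse/getOpt pair above, assuming the
     CommandFormat(String, int, int, String...) constructor shown in this listing; the command
     name, flag and path are illustrative.

import java.util.List;
import org.apache.hadoop.fs.shell.CommandFormat;

public class CommandFormatSketch {
  public static void main(String[] args) {
    // Accept between 1 and 2 path arguments and recognise a "-q" flag (illustrative).
    CommandFormat fmt = new CommandFormat("count", 1, 2, "q");

    String[] argv = {"-count", "-q", "/user/alice"};
    // Start parsing after the command name itself (position 1).
    List<String> paths = fmt.parse(argv, 1);

    System.out.println(paths);           // [/user/alice]
    System.out.println(fmt.getOpt("q")); // true
  }
}
-->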
|
|
<!-- start class org.apache.hadoop.fs.shell.Count -->
|
|
<class name="Count" extends="org.apache.hadoop.fs.shell.Command"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Count" type="java.lang.String[], int, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor
|
|
|
|
@param cmd the count command
|
|
@param pos the starting index of the arguments]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="matches" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cmd" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Check if a command is the count command
|
|
|
|
@param cmd A string representation of a command starting with "-"
|
|
@return true if this is a count command; false otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCommandName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="NAME" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="USAGE" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DESCRIPTION" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Count the number of directories, files, bytes, quota, and remaining quota.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.fs.shell.Count -->
|
|
</package>
|
|
<package name="org.apache.hadoop.http">
|
|
<!-- start interface org.apache.hadoop.http.FilterContainer -->
|
|
<interface name="FilterContainer" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="addFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="classname" type="java.lang.String"/>
|
|
<param name="parameters" type="java.util.Map<java.lang.String, java.lang.String>"/>
|
|
<doc>
|
|
<![CDATA[Add a filter to the container.
|
|
@param name Filter name
|
|
@param classname Filter class name
|
|
@param parameters a map from parameter names to initial values]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A container class for javax.servlet.Filter.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.http.FilterContainer -->
|
|
<!-- start class org.apache.hadoop.http.FilterInitializer -->
|
|
<class name="FilterInitializer" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FilterInitializer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Initialize a javax.servlet.Filter.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.http.FilterInitializer -->
|
|
<!-- start class org.apache.hadoop.http.HttpServer -->
|
|
<class name="HttpServer" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.http.FilterContainer"/>
|
|
<constructor name="HttpServer" type="java.lang.String, java.lang.String, int, boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Same as this(name, bindAddress, port, findPort, null);]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="HttpServer" type="java.lang.String, java.lang.String, int, boolean, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a status server on the given port.
|
|
The jsp scripts are taken from src/webapps/<name>.
|
|
@param name The name of the server
|
|
@param port The port to use on the server
|
|
@param findPort whether the server should start at the given port and
|
|
increment by 1 until it finds a free port.
|
|
@param conf Configuration]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addDefaultApps"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="appDir" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add default apps.
|
|
@param appDir The application directory
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addDefaultServlets"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Add default servlets.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addContext"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="pathSpec" type="java.lang.String"/>
|
|
<param name="dir" type="java.lang.String"/>
|
|
<param name="isFiltered" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add a context
|
|
@param pathSpec The path spec for the context
|
|
@param dir The directory containing the context
|
|
@param isFiltered if true, the servlet is added to the filter path mapping
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setAttribute"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="value" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Set a value in the webapp context. These values are available to the jsp
|
|
pages as "application.getAttribute(name)".
|
|
@param name The name of the attribute
|
|
@param value The value of the attribute]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addServlet"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="pathSpec" type="java.lang.String"/>
|
|
<param name="clazz" type="java.lang.Class<? extends javax.servlet.http.HttpServlet>"/>
|
|
<doc>
|
|
<![CDATA[Add a servlet in the server.
|
|
@param name The name of the servlet (can be passed as null)
|
|
@param pathSpec The path spec for the servlet
|
|
@param clazz The servlet class]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addInternalServlet"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="this is a temporary method">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="pathSpec" type="java.lang.String"/>
|
|
<param name="clazz" type="java.lang.Class<? extends javax.servlet.http.HttpServlet>"/>
|
|
<doc>
|
|
<![CDATA[Add an internal servlet in the server.
|
|
@param name The name of the servlet (can be passed as null)
|
|
@param pathSpec The path spec for the servlet
|
|
@param clazz The servlet class
|
|
@deprecated this is a temporary method]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="classname" type="java.lang.String"/>
|
|
<param name="parameters" type="java.util.Map<java.lang.String, java.lang.String>"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="defineFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="ctx" type="org.mortbay.jetty.servlet.WebApplicationContext"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="classname" type="java.lang.String"/>
|
|
<param name="parameters" type="java.util.Map<java.lang.String, java.lang.String>"/>
|
|
<param name="urls" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Define a filter for a context and set up default url mappings.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addFilterPathMapping"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="pathSpec" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Add the path spec to the filter path mapping.
|
|
@param pathSpec The path spec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAttribute" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the value in the webapp context.
|
|
@param name The name of the attribute
|
|
@return The value of the attribute]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getWebAppsPath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the pathname to the webapps files.
|
|
@return the pathname as a URL
|
|
@throws IOException if 'webapps' directory cannot be found on CLASSPATH.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPort" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the port that the server is on
|
|
@return the port]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setThreads"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="min" type="int"/>
|
|
<param name="max" type="int"/>
|
|
</method>
|
|
<method name="addSslListener"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="addr" type="java.net.InetSocketAddress"/>
|
|
<param name="keystore" type="java.lang.String"/>
|
|
<param name="storPass" type="java.lang.String"/>
|
|
<param name="keyPass" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Configure an ssl listener on the server.
|
|
@param addr address to listen on
|
|
@param keystore location of the keystore
|
|
@param storPass password for the keystore
|
|
@param keyPass password for the key]]>
|
|
</doc>
|
|
</method>
|
|
<method name="start"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Start the server. Does not wait for the server to start.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stop"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<doc>
|
|
<![CDATA[stop the server]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="webServer" type="org.mortbay.jetty.Server"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="webAppContext" type="org.mortbay.jetty.servlet.WebApplicationContext"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="defaultContexts" type="java.util.Map<org.mortbay.jetty.servlet.WebApplicationContext, java.lang.Boolean>"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="findPort" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="listener" type="org.mortbay.http.SocketListener"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="filterNames" type="java.util.List<java.lang.String>"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Create a Jetty embedded server to answer http requests. The primary goal
is to serve up status information for the server.
There are three contexts:
"/logs/" -> points to the log directory
"/static/" -> points to common static files (src/webapps/static)
"/" -> the jsp server code from (src/webapps/<name>)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.http.HttpServer -->
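<!-- Usage sketch (illustrative; the webapp name "myapp" and the port are placeholders, and the name
     must match a directory under src/webapps): starting an embedded status server with the
     constructors and methods listed above.

       HttpServer server = new HttpServer("myapp", "0.0.0.0", 50030, true);   // findPort=true: probe upward if busy
       server.setAttribute("my.conf", conf);                                  // visible to jsp pages via application.getAttribute
       server.addServlet("stacks", "/stacks", HttpServer.StackServlet.class);
       server.start();                                                        // returns without waiting for startup
       int boundPort = server.getPort();                                      // actual port after probing
       server.stop();
-->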
|
|
<!-- start class org.apache.hadoop.http.HttpServer.StackServlet -->
|
|
<class name="HttpServer.StackServlet" extends="javax.servlet.http.HttpServlet"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="HttpServer.StackServlet"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="doGet"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="request" type="javax.servlet.http.HttpServletRequest"/>
|
|
<param name="response" type="javax.servlet.http.HttpServletResponse"/>
|
|
<exception name="ServletException" type="javax.servlet.ServletException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A very simple servlet to serve up a text representation of the current
stack traces. It both returns the stacks to the caller and logs them.
The stack traces are currently collected one thread at a time, so they do not
form an exactly simultaneous snapshot.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.http.HttpServer.StackServlet -->
|
|
</package>
|
|
<package name="org.apache.hadoop.io">
|
|
<!-- start class org.apache.hadoop.io.AbstractMapWritable -->
|
|
<class name="AbstractMapWritable" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="AbstractMapWritable"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[constructor.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addToMap"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="clazz" type="java.lang.Class"/>
|
|
<doc>
|
|
<![CDATA[Add a Class to the maps if it is not already present.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="id" type="byte"/>
|
|
<doc>
|
|
<![CDATA[@return the Class class for the specified id]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getId" return="byte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="clazz" type="java.lang.Class"/>
|
|
<doc>
|
|
<![CDATA[@return the id for the specified Class]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copy"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.io.Writable"/>
|
|
<doc>
|
|
<![CDATA[Used by child copy constructors.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the conf]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[@param conf the conf to set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Abstract base class for MapWritable and SortedMapWritable

Unlike org.apache.nutch.crawl.MapWritable, this class allows creation of
MapWritable<Writable, MapWritable> so the CLASS_TO_ID and ID_TO_CLASS
maps travel with the class instead of being static.

Class ids range from 1 to 127 so there can be at most 127 distinct classes
in any specific map instance.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.AbstractMapWritable -->
|
|
<!-- start class org.apache.hadoop.io.ArrayFile -->
|
|
<class name="ArrayFile" extends="org.apache.hadoop.io.MapFile"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ArrayFile"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[A dense file-based mapping from integers to values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.ArrayFile -->
|
|
<!-- start class org.apache.hadoop.io.ArrayFile.Reader -->
|
|
<class name="ArrayFile.Reader" extends="org.apache.hadoop.io.MapFile.Reader"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ArrayFile.Reader" type="org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct an array reader for the named file.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Positions the reader before its <code>n</code>th value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read and return the next value in the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="key" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns the key associated with the most recent call to {@link
|
|
#seek(long)}, {@link #next(Writable)}, or {@link
|
|
#get(long,Writable)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="long"/>
|
|
<param name="value" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the <code>n</code>th value in the file.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Provide access to an existing array file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.ArrayFile.Reader -->
|
|
<!-- start class org.apache.hadoop.io.ArrayFile.Writer -->
|
|
<class name="ArrayFile.Writer" extends="org.apache.hadoop.io.MapFile.Writer"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ArrayFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.Writable>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named file for values of the named class.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ArrayFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.Writable>, org.apache.hadoop.io.SequenceFile.CompressionType, org.apache.hadoop.util.Progressable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named file for values of the named class.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append a value to the file.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Write a new array file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.ArrayFile.Writer -->
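<!-- Usage sketch (illustrative; the file name and values are placeholders): writing a dense
     integer-indexed file with ArrayFile.Writer and reading it back by position with ArrayFile.Reader.

       Configuration conf = new Configuration();
       FileSystem fs = FileSystem.get(conf);
       ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, "part-0", IntWritable.class);
       writer.append(new IntWritable(7));     // stored at index 0
       writer.append(new IntWritable(42));    // stored at index 1
       writer.close();

       ArrayFile.Reader reader = new ArrayFile.Reader(fs, "part-0", conf);
       IntWritable value = new IntWritable();
       reader.get(1, value);                  // value is now 42; reader.key() returns 1
       reader.close();
-->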
|
|
<!-- start class org.apache.hadoop.io.ArrayWritable -->
|
|
<class name="ArrayWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="ArrayWritable" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="ArrayWritable" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>, org.apache.hadoop.io.Writable[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="ArrayWritable" type="java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getValueClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="toStrings" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="toArray" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="values" type="org.apache.hadoop.io.Writable[]"/>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Writable for arrays containing instances of a class. The elements of this
writable must all be instances of the same class. If this writable will be
the input for a Reducer, you will need to create a subclass that sets the
value to be of the proper type.

For example:
<code>
public class IntArrayWritable extends ArrayWritable {
public IntArrayWritable() {
super(IntWritable.class);
}
}
</code>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.ArrayWritable -->
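<!-- Usage sketch (illustrative): wrapping an array of Writables with the constructors and
     accessors listed above.

       Text[] values = { new Text("alpha"), new Text("beta") };
       ArrayWritable aw = new ArrayWritable(Text.class, values);
       String[] strings = aw.toStrings();     // {"alpha", "beta"}
       Writable[] back = aw.get();            // the wrapped array
-->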
|
|
<!-- start class org.apache.hadoop.io.BinaryComparable -->
|
|
<class name="BinaryComparable" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Comparable<org.apache.hadoop.io.BinaryComparable>"/>
|
|
<constructor name="BinaryComparable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getLength" return="int"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return n such that bytes 0..n-1 from {#getBytes()} are valid.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytes" return="byte[]"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return representative byte array for this instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.io.BinaryComparable"/>
|
|
<doc>
|
|
<![CDATA[Compare bytes from {#getBytes()}.
|
|
@see org.apache.hadoop.io.WritableComparator#compareBytes(byte[],int,int,byte[],int,int)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Compare bytes from {#getBytes()} to those provided.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Return true if bytes from {#getBytes()} match.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return a hash of the bytes returned from {#getBytes()}.
|
|
@see org.apache.hadoop.io.WritableComparator#hashBytes(byte[],int)]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Interface supported by {@link org.apache.hadoop.io.WritableComparable}
|
|
types supporting ordering/permutation by a representative set of bytes.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.BinaryComparable -->
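<!-- Implementation sketch (illustrative; the RawKey class is hypothetical): a minimal subclass only
     supplies getBytes() and getLength(); byte-wise compareTo, equals and hashCode come from
     BinaryComparable, following the same pattern BytesWritable uses.

       public class RawKey extends BinaryComparable implements WritableComparable<BinaryComparable> {
         private byte[] bytes = new byte[0];
         public byte[] getBytes() { return bytes; }
         public int getLength() { return bytes.length; }
         public void readFields(DataInput in) throws IOException {
           bytes = new byte[in.readInt()];
           in.readFully(bytes);
         }
         public void write(DataOutput out) throws IOException {
           out.writeInt(bytes.length);
           out.write(bytes);
         }
       }
-->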
|
|
<!-- start class org.apache.hadoop.io.BooleanWritable -->
|
|
<class name="BooleanWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="BooleanWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="BooleanWritable" type="boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set the value of the BooleanWritable]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the value of the BooleanWritable]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for booleans.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.BooleanWritable -->
|
|
<!-- start class org.apache.hadoop.io.BooleanWritable.Comparator -->
|
|
<class name="BooleanWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="BooleanWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator optimized for BooleanWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.BooleanWritable.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.BytesWritable -->
|
|
<class name="BytesWritable" extends="org.apache.hadoop.io.BinaryComparable"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable<org.apache.hadoop.io.BinaryComparable>"/>
|
|
<constructor name="BytesWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a zero-size sequence.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="BytesWritable" type="byte[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a BytesWritable using the byte array as the initial value.
|
|
@param bytes This array becomes the backing storage for the object.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getBytes" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the data from the BytesWritable.
|
|
@return The data is only valid between 0 and getLength() - 1.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use {@link #getBytes()} instead.">
|
|
<doc>
|
|
<![CDATA[Get the data from the BytesWritable.
|
|
@deprecated Use {@link #getBytes()} instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the current size of the buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Use {@link #getLength()} instead.">
|
|
<doc>
|
|
<![CDATA[Get the current size of the buffer.
|
|
@deprecated Use {@link #getLength()} instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="size" type="int"/>
|
|
<doc>
|
|
<![CDATA[Change the size of the buffer. The values in the old range are preserved
|
|
and any new values are undefined. The capacity is changed if it is
|
|
necessary.
|
|
@param size The new number of bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCapacity" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the capacity, which is the maximum size that could be handled without
|
|
resizing the backing storage.
|
|
@return The number of bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCapacity"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="new_cap" type="int"/>
|
|
<doc>
|
|
<![CDATA[Change the capacity of the backing storage.
|
|
The data is preserved.
|
|
@param new_cap The new capacity in bytes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newData" type="org.apache.hadoop.io.BytesWritable"/>
|
|
<doc>
|
|
<![CDATA[Set the BytesWritable to the contents of the given newData.
|
|
@param newData the value to set this BytesWritable to.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newData" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the value to a copy of the given byte range
|
|
@param newData the new values to copy in
|
|
@param offset the offset in newData to start at
|
|
@param length the number of bytes to copy]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="right_obj" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Are the two byte sequences equal?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Generate the stream of bytes as hex pairs separated by ' '.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A byte sequence that is usable as a key or value.
It is resizable and distinguishes between the size of the sequence and
the current capacity. The hash function is the front of the md5 of the
buffer. The sort order is the same as memcmp.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.BytesWritable -->
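<!-- Usage sketch (illustrative): only the first getLength() bytes of getBytes() are meaningful,
     since the backing array may be larger than the stored sequence.

       byte[] raw = new byte[] { 1, 2, 3, 4, 5 };
       BytesWritable bw = new BytesWritable();
       bw.set(raw, 0, 3);                                                     // store the first three bytes
       byte[] valid = java.util.Arrays.copyOf(bw.getBytes(), bw.getLength()); // {1, 2, 3}
-->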
|
|
<!-- start class org.apache.hadoop.io.BytesWritable.Comparator -->
|
|
<class name="BytesWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="BytesWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
<doc>
|
|
<![CDATA[Compare the buffers in serialized form.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator optimized for BytesWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.BytesWritable.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.ByteWritable -->
|
|
<class name="ByteWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="ByteWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="ByteWritable" type="byte"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Set the value of this ByteWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="byte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the value of this ByteWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a ByteWritable with the same value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compares two ByteWritables.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for a single byte.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.ByteWritable -->
|
|
<!-- start class org.apache.hadoop.io.ByteWritable.Comparator -->
|
|
<class name="ByteWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ByteWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator optimized for ByteWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.ByteWritable.Comparator -->
|
|
<!-- start interface org.apache.hadoop.io.Closeable -->
|
|
<interface name="Closeable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use java.io.Closeable">
|
|
<implements name="java.io.Closeable"/>
|
|
<doc>
|
|
<![CDATA[@deprecated use java.io.Closeable]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.Closeable -->
|
|
<!-- start class org.apache.hadoop.io.CompressedWritable -->
|
|
<class name="CompressedWritable" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="CompressedWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="ensureInflated"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Must be called by all methods which access fields to ensure that the data
|
|
has been uncompressed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFieldsCompressed"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Subclasses implement this instead of {@link #readFields(DataInput)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeCompressed"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Subclasses implement this instead of {@link #write(DataOutput)}.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A base-class for Writables which store themselves compressed and lazily
inflate on field access. This is useful for large objects whose fields are
not altered during a map or reduce operation: leaving the field data
compressed makes copying the instance from one file to another much
faster.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.CompressedWritable -->
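<!-- Implementation sketch (illustrative; the BigRecord class and its field layout are hypothetical):
     a subclass implements readFieldsCompressed/writeCompressed and calls ensureInflated() from every
     accessor so the compressed payload is only inflated when a field is actually read.

       public class BigRecord extends CompressedWritable {
         private String payload = "";
         protected void readFieldsCompressed(DataInput in) throws IOException {
           payload = in.readUTF();
         }
         protected void writeCompressed(DataOutput out) throws IOException {
           out.writeUTF(payload);
         }
         public String getPayload() {
           ensureInflated();          // decompress lazily before the first field access
           return payload;
         }
       }
-->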
|
|
<!-- start class org.apache.hadoop.io.DataInputBuffer -->
|
|
<class name="DataInputBuffer" extends="java.io.DataInputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DataInputBuffer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a new empty buffer.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="byte[]"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Resets the data that the buffer reads.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Resets the data that the buffer reads.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getData" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getPosition" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the current position in the input.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the length of the input.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A reusable {@link DataInput} implementation that reads from an in-memory
|
|
buffer.
|
|
|
|
<p>This saves memory over creating a new DataInputStream and
|
|
ByteArrayInputStream each time data is read.
|
|
|
|
<p>Typical usage is something like the following:<pre>
|
|
|
|
DataInputBuffer buffer = new DataInputBuffer();
|
|
while (... loop condition ...) {
|
|
byte[] data = ... get data ...;
|
|
int dataLength = ... get data length ...;
|
|
buffer.reset(data, dataLength);
|
|
... read buffer using DataInput methods ...
|
|
}
|
|
</pre>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.DataInputBuffer -->
|
|
<!-- start class org.apache.hadoop.io.DataOutputBuffer -->
|
|
<class name="DataOutputBuffer" extends="java.io.DataOutputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DataOutputBuffer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a new empty buffer.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="DataOutputBuffer" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getData" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the current contents of the buffer.
|
|
Data is only valid to {@link #getLength()}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the length of the valid data currently in the buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset" return="org.apache.hadoop.io.DataOutputBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resets the buffer to empty.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes bytes from a DataInput directly into the buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeTo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write to a file stream]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A reusable {@link DataOutput} implementation that writes to an in-memory
|
|
buffer.
|
|
|
|
<p>This saves memory over creating a new DataOutputStream and
|
|
ByteArrayOutputStream each time data is written.
|
|
|
|
<p>Typical usage is something like the following:<pre>
|
|
|
|
DataOutputBuffer buffer = new DataOutputBuffer();
|
|
while (... loop condition ...) {
|
|
buffer.reset();
|
|
... write buffer using DataOutput methods ...
|
|
byte[] data = buffer.getData();
|
|
int dataLength = buffer.getLength();
|
|
... write data to its ultimate destination ...
|
|
}
|
|
</pre>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.DataOutputBuffer -->
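<!-- Usage sketch (illustrative): round-tripping a Writable through the reusable buffers described above.

       DataOutputBuffer out = new DataOutputBuffer();
       new IntWritable(123).write(out);                 // serialize into the buffer
       DataInputBuffer in = new DataInputBuffer();
       in.reset(out.getData(), out.getLength());        // only the first getLength() bytes are valid
       IntWritable copy = new IntWritable();
       copy.readFields(in);                             // copy.get() == 123
-->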
|
|
<!-- start class org.apache.hadoop.io.DefaultStringifier -->
|
|
<class name="DefaultStringifier" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Stringifier<T>"/>
|
|
<constructor name="DefaultStringifier" type="org.apache.hadoop.conf.Configuration, java.lang.Class<T>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="fromString" return="T"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="obj" type="T"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="store"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="item" type="K"/>
|
|
<param name="keyName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Stores the item in the configuration with the given keyName.
|
|
|
|
@param <K> the class of the item
|
|
@param conf the configuration to store
|
|
@param item the object to be stored
|
|
@param keyName the name of the key to use
|
|
@throws IOException : forwards Exceptions from the underlying
|
|
{@link Serialization} classes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="load" return="K"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="keyName" type="java.lang.String"/>
|
|
<param name="itemClass" type="java.lang.Class<K>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Restores the object from the configuration.
|
|
|
|
@param <K> the class of the item
|
|
@param conf the configuration to use
|
|
@param keyName the name of the key to use
|
|
@param itemClass the class of the item
|
|
@return restored object
|
|
@throws IOException : forwards Exceptions from the underlying
|
|
{@link Serialization} classes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="storeArray"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="items" type="K[]"/>
|
|
<param name="keyName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Stores the array of items in the configuration with the given keyName.
|
|
|
|
@param <K> the class of the item
|
|
@param conf the configuration to use
|
|
@param items the objects to be stored
|
|
@param keyName the name of the key to use
|
|
@throws IndexOutOfBoundsException if the items array is empty
|
|
@throws IOException : forwards Exceptions from the underlying
|
|
{@link Serialization} classes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="loadArray" return="K[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="keyName" type="java.lang.String"/>
|
|
<param name="itemClass" type="java.lang.Class<K>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Restores the array of objects from the configuration.
|
|
|
|
@param <K> the class of the item
|
|
@param conf the configuration to use
|
|
@param keyName the name of the key to use
|
|
@param itemClass the class of the item
|
|
@return restored object
|
|
@throws IOException : forwards Exceptions from the underlying
|
|
{@link Serialization} classes.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[DefaultStringifier is the default implementation of the {@link Stringifier}
interface which stringifies the objects using base64 encoding of the
serialized version of the objects. The {@link Serializer} and
{@link Deserializer} are obtained from the {@link SerializationFactory}.
<br>
DefaultStringifier offers convenience methods to store/load objects to/from
the configuration.

@param <T> the class of the objects to stringify]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.DefaultStringifier -->
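<!-- Usage sketch (illustrative; the key name is a placeholder): storing and restoring an object
     through the configuration with the static convenience methods listed above.

       Configuration conf = new Configuration();
       DefaultStringifier.store(conf, new IntWritable(42), "my.saved.value");
       IntWritable restored = DefaultStringifier.load(conf, "my.saved.value", IntWritable.class);
-->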
|
|
<!-- start class org.apache.hadoop.io.DoubleWritable -->
|
|
<class name="DoubleWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="DoubleWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="DoubleWritable" type="double"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="double"/>
|
|
</method>
|
|
<method name="get" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a DoubleWritable with the same value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Writable for Double values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.DoubleWritable -->
|
|
<!-- start class org.apache.hadoop.io.DoubleWritable.Comparator -->
|
|
<class name="DoubleWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DoubleWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator optimized for DoubleWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.DoubleWritable.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.FloatWritable -->
|
|
<class name="FloatWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="FloatWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FloatWritable" type="float"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="float"/>
|
|
<doc>
|
|
<![CDATA[Set the value of this FloatWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the value of this FloatWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a FloatWritable with the same value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compares two FloatWritables.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for floats.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.FloatWritable -->
|
|
<!-- start class org.apache.hadoop.io.FloatWritable.Comparator -->
|
|
<class name="FloatWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FloatWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator optimized for FloatWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.FloatWritable.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.GenericWritable -->
|
|
<class name="GenericWritable" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="GenericWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="obj" type="org.apache.hadoop.io.Writable"/>
|
|
<doc>
|
|
<![CDATA[Set the instance that is wrapped.

@param obj the <code>Writable</code> instance to wrap]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the wrapped instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getTypes" return="java.lang.Class[]"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return all classes that may be wrapped. Subclasses should implement this
|
|
to return a constant array of classes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A wrapper for Writable instances.
|
|
<p>
|
|
When two sequence files, which have the same Key type but different Value
types, are mapped out to a reduce, multiple Value types are not allowed.
|
|
In this case, this class can help you wrap instances with different types.
|
|
</p>
|
|
|
|
<p>
|
|
Compared with <code>ObjectWritable</code>, this class is much more efficient,
|
|
because <code>ObjectWritable</code> will append the class declaration as a String
|
|
into the output file in every Key-Value pair.
|
|
</p>
|
|
|
|
<p>
|
|
Generic Writable implements the {@link Configurable} interface, so that it will be
|
|
configured by the framework. The configuration is passed to the wrapped objects
|
|
implementing {@link Configurable} interface <i>before deserialization</i>.
|
|
</p>
|
|
|
|
How to use it: <br>
1. Write your own class, such as GenericObject, which extends GenericWritable.<br>
2. Implement the abstract method <code>getTypes()</code> to return
the classes that may be wrapped in GenericObject in the application.
Note: the classes returned by <code>getTypes()</code> must
implement the <code>Writable</code> interface.
|
|
<br><br>
|
|
|
|
The code looks like this:
|
|
<blockquote><pre>
|
|
public class GenericObject extends GenericWritable {
|
|
|
|
private static Class[] CLASSES = {
|
|
ClassType1.class,
|
|
ClassType2.class,
|
|
ClassType3.class,
|
|
};
|
|
|
|
protected Class[] getTypes() {
|
|
return CLASSES;
|
|
}
|
|
|
|
}
|
|
</pre></blockquote>
|
|
|
|
@since Nov 8, 2006]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.GenericWritable -->
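<!-- A minimal usage sketch of GenericWritable, complementing the getTypes() example in
     the class doc above. The subclass name MyGenericObject and the choice of Text and
     IntWritable as wrapped types are illustrative assumptions, not part of the API data.

import org.apache.hadoop.io.GenericWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MyGenericObject extends GenericWritable {
  private static final Class[] CLASSES = { Text.class, IntWritable.class };

  protected Class[] getTypes() {
    return CLASSES;   // the classes that may be wrapped, as required by the contract above
  }

  public static void main(String[] args) {
    MyGenericObject wrapper = new MyGenericObject();
    wrapper.set(new Text("hello"));      // wrap a Text value
    Writable wrapped = wrapper.get();    // unwrap it again
    System.out.println(wrapped);         // prints: hello
  }
}
-->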
|
|
<!-- start class org.apache.hadoop.io.InputBuffer -->
|
|
<class name="InputBuffer" extends="java.io.FilterInputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="InputBuffer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a new empty buffer.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="byte[]"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Resets the data that the buffer reads.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Resets the data that the buffer reads.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPosition" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the current position in the input.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the length of the input.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A reusable {@link InputStream} implementation that reads from an in-memory
|
|
buffer.
|
|
|
|
<p>This saves memory over creating a new InputStream and
|
|
ByteArrayInputStream each time data is read.
|
|
|
|
<p>Typical usage is something like the following:<pre>
|
|
|
|
InputBuffer buffer = new InputBuffer();
|
|
while (... loop condition ...) {
|
|
byte[] data = ... get data ...;
|
|
int dataLength = ... get data length ...;
|
|
buffer.reset(data, dataLength);
|
|
... read buffer using InputStream methods ...
|
|
}
|
|
</pre>
|
|
@see DataInputBuffer
|
|
@see DataOutput]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.InputBuffer -->
|
|
<!-- start class org.apache.hadoop.io.IntWritable -->
|
|
<class name="IntWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="IntWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="IntWritable" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the value of this IntWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the value of this IntWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is an IntWritable with the same value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compares two IntWritables.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for ints.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.IntWritable -->
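<!-- A minimal sketch of the Writable round trip described by write() and readFields()
     above, using plain java.io buffers; the demo class name is an illustrative assumption.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;

public class IntWritableRoundTrip {
  public static void main(String[] args) throws IOException {
    IntWritable original = new IntWritable(42);

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    original.write(new DataOutputStream(bytes));   // serialize to 4 bytes

    IntWritable copy = new IntWritable();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

    System.out.println(copy.get());                // prints 42
    System.out.println(original.compareTo(copy));  // prints 0
  }
}
-->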
|
|
<!-- start class org.apache.hadoop.io.IntWritable.Comparator -->
|
|
<class name="IntWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="IntWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator optimized for IntWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.IntWritable.Comparator -->
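<!-- A sketch of the raw-bytes compare(byte[], int, int, byte[], int, int) contract shown
     above: the registered comparator orders serialized IntWritables without deserializing
     them. The helper and class names are illustrative assumptions.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparator;

public class RawCompareDemo {
  private static byte[] serialize(IntWritable w) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    w.write(new DataOutputStream(bytes));
    return bytes.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    byte[] a = serialize(new IntWritable(7));
    byte[] b = serialize(new IntWritable(42));

    WritableComparator cmp = WritableComparator.get(IntWritable.class);
    // Negative result: 7 sorts before 42, decided directly on the serialized bytes.
    System.out.println(cmp.compare(a, 0, a.length, b, 0, b.length));
  }
}
-->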
|
|
<!-- start class org.apache.hadoop.io.IOUtils -->
|
|
<class name="IOUtils" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="IOUtils"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="copyBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="buffSize" type="int"/>
|
|
<param name="close" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copies from one stream to another.
|
|
@param in InputStream to read from
|
|
@param out OutputStream to write to
|
|
@param buffSize the size of the buffer
|
|
@param close whether or not to close the InputStream and
|
|
OutputStream at the end. The streams are closed in the finally clause.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copies from one stream to another. <strong>closes the input and output streams
|
|
at the end</strong>.
|
|
@param in InputStream to read from
|
|
@param out OutputStream to write to
|
|
@param conf the Configuration object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copyBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="close" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Copies from one stream to another.
|
|
@param in InputStream to read from
|
|
@param out OutputStream to write to
|
|
@param conf the Configuration object
|
|
@param close whether or not to close the InputStream and
|
|
OutputStream at the end. The streams are closed in the finally clause.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="buf" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads len bytes in a loop.
|
|
@param in The InputStream to read from
|
|
@param buf The buffer to fill
|
|
@param off offset into the buffer
|
|
@param len the length of bytes to read
|
|
@throws IOException if it could not read requested number of bytes
|
|
for any reason (including EOF)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="skipFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="len" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Similar to readFully(). Skips bytes in a loop.
|
|
@param in The InputStream to skip bytes from
|
|
@param len number of bytes to skip.
|
|
@throws IOException if it could not skip requested number of bytes
|
|
for any reason (including EOF)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="cleanup"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="log" type="org.apache.commons.logging.Log"/>
|
|
<param name="closeables" type="java.io.Closeable[]"/>
|
|
<doc>
|
|
<![CDATA[Close the Closeable objects and <b>ignore</b> any {@link IOException} or
|
|
null pointers. Must only be used for cleanup in exception handlers.
|
|
@param log the log to record problems to at debug level. Can be null.
|
|
@param closeables the objects to close]]>
|
|
</doc>
|
|
</method>
|
|
<method name="closeStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.Closeable"/>
|
|
<doc>
|
|
<![CDATA[Closes the stream ignoring {@link IOException}.
|
|
Must only be called in cleaning up from exception handlers.
|
|
@param stream the Stream to close]]>
|
|
</doc>
|
|
</method>
|
|
<method name="closeSocket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="sock" type="java.net.Socket"/>
|
|
<doc>
|
|
<![CDATA[Closes the socket ignoring {@link IOException}
|
|
@param sock the Socket to close]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A utility class for I/O-related functionality.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.IOUtils -->
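<!-- A sketch of IOUtils.copyBytes as documented above: copy one stream to another with a
     4 KB buffer and let the utility close both streams in its finally clause. The demo
     class name and the use of command-line paths are illustrative assumptions.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.io.IOUtils;

public class CopyDemo {
  public static void main(String[] args) throws IOException {
    InputStream in = new FileInputStream(args[0]);     // source path (supplied by caller)
    OutputStream out = new FileOutputStream(args[1]);  // destination path
    // close=true: both streams are closed for us, even if the copy fails.
    IOUtils.copyBytes(in, out, 4096, true);
  }
}
-->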
|
|
<!-- start class org.apache.hadoop.io.IOUtils.NullOutputStream -->
|
|
<class name="IOUtils.NullOutputStream" extends="java.io.OutputStream"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="IOUtils.NullOutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[/dev/null of OutputStreams.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.IOUtils.NullOutputStream -->
|
|
<!-- start class org.apache.hadoop.io.LongWritable -->
|
|
<class name="LongWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="LongWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="LongWritable" type="long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the value of this LongWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the value of this LongWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a LongWritable with the same value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compares two LongWritables.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for longs.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.LongWritable -->
|
|
<!-- start class org.apache.hadoop.io.LongWritable.Comparator -->
|
|
<class name="LongWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LongWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator optimized for LongWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.LongWritable.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.LongWritable.DecreasingComparator -->
|
|
<class name="LongWritable.DecreasingComparator" extends="org.apache.hadoop.io.LongWritable.Comparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LongWritable.DecreasingComparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="a" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="b" type="org.apache.hadoop.io.WritableComparable"/>
|
|
</method>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A decreasing Comparator optimized for LongWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.LongWritable.DecreasingComparator -->
|
|
<!-- start class org.apache.hadoop.io.MapFile -->
|
|
<class name="MapFile" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MapFile"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="rename"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="oldName" type="java.lang.String"/>
|
|
<param name="newName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Renames an existing map directory.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="delete"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Deletes the named map file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fix" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>"/>
|
|
<param name="valueClass" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>"/>
|
|
<param name="dryrun" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[This method attempts to fix a corrupt MapFile by re-creating its index.
|
|
@param fs filesystem
|
|
@param dir directory containing the MapFile data and index
|
|
@param keyClass key class (has to be a subclass of Writable)
|
|
@param valueClass value class (has to be a subclass of Writable)
|
|
@param dryrun do not perform any changes, just report what needs to be done
|
|
@return number of valid entries in this MapFile, or -1 if no fixing was needed
|
|
@throws Exception]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<field name="INDEX_FILE_NAME" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The name of the index file.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="DATA_FILE_NAME" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The name of the data file.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A file-based map from keys to values.
|
|
|
|
<p>A map is a directory containing two files, the <code>data</code> file,
|
|
containing all keys and values in the map, and a smaller <code>index</code>
|
|
file, containing a fraction of the keys. The fraction is determined by
|
|
{@link Writer#getIndexInterval()}.
|
|
|
|
<p>The index file is read entirely into memory. Thus key implementations
|
|
should try to keep themselves small.
|
|
|
|
<p>Map files are created by adding entries in-order. To maintain a large
|
|
database, perform updates by copying the previous version of a database and
|
|
merging in a sorted change list, to create a new version of the database in
|
|
a new file. Sorting large change lists can be done with {@link
|
|
SequenceFile.Sorter}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.MapFile -->
|
|
<!-- start class org.apache.hadoop.io.MapFile.Reader -->
|
|
<class name="MapFile.Reader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.io.Closeable"/>
|
|
<constructor name="MapFile.Reader" type="org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a map reader for the named map.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Reader" type="org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a map reader for the named map using the named comparator.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Reader" type="org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, org.apache.hadoop.conf.Configuration, boolean"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Hook to allow subclasses to defer opening streams until further
|
|
initialization is complete.
|
|
@see #createDataFileReader(FileSystem, Path, Configuration)]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getKeyClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the class of keys in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValueClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the class of values in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="open"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dirName" type="java.lang.String"/>
|
|
<param name="comparator" type="org.apache.hadoop.io.WritableComparator"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createDataFileReader" return="org.apache.hadoop.io.SequenceFile.Reader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dataFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Override this method to specialize the type of
|
|
{@link SequenceFile.Reader} returned.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Re-positions the reader before its first key.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="midKey" return="org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the key at approximately the middle of the file.
|
|
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="finalKey"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads the final key from the file.
|
|
|
|
@param key key to read into]]>
|
|
</doc>
|
|
</method>
|
|
<method name="seek" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Positions the reader at the named key, or if no such key exists, at the
|
|
first entry after the named key. Returns true iff the named key exists
|
|
in this map.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the next key/value pair in the map into <code>key</code> and
|
|
<code>val</code>. Returns true if such a pair exists and false when at
|
|
the end of the map.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the value for the named key, or null if none exists.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClosest" return="org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Finds the record that is the closest match to the specified key.
Returns <code>key</code> or, if it does not exist, the first entry
after the named key.

@param key the key we are trying to find
@param val the data value, filled in if the key is found
@return the key that was the closest match, or null if eof]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClosest" return="org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="before" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Finds the record that is the closest match to the specified key.
|
|
|
|
@param key - key that we're trying to find
|
|
@param val - data value if key is found
|
|
@param before - If true, and <code>key</code> does not exist, return
|
|
the first entry that falls just before the <code>key</code>. Otherwise,
|
|
return the record that sorts just after.
|
|
@return - the key that was the closest match or null if eof.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close the map.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Provide access to an existing map.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.MapFile.Reader -->
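<!-- A sketch of the Reader lookups described above: get() for an exact key, getClosest()
     for the nearest entry at or after a key. The map directory path "/tmp/demo.map", the
     key/value types, and the demo class name are illustrative assumptions.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.Text;

public class MapFileLookup {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    MapFile.Reader reader = new MapFile.Reader(fs, "/tmp/demo.map", conf);

    Text value = new Text();
    if (reader.get(new IntWritable(5), value) != null) {   // exact match, or null
      System.out.println("key 5 => " + value);
    }
    // Nearest entry at or after key 6 (null only at end of file).
    System.out.println(reader.getClosest(new IntWritable(6), value));
    reader.close();
  }
}
-->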
|
|
<!-- start class org.apache.hadoop.io.MapFile.Writer -->
|
|
<class name="MapFile.Writer" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.io.Closeable"/>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>, java.lang.Class"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map for keys of the named class.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>, java.lang.Class, org.apache.hadoop.io.SequenceFile.CompressionType, org.apache.hadoop.util.Progressable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map for keys of the named class.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>, java.lang.Class, org.apache.hadoop.io.SequenceFile.CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map for keys of the named class.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>, java.lang.Class, org.apache.hadoop.io.SequenceFile.CompressionType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map for keys of the named class.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, java.lang.Class"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map using the named key comparator.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, java.lang.Class, org.apache.hadoop.io.SequenceFile.CompressionType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map using the named key comparator.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, java.lang.Class, org.apache.hadoop.io.SequenceFile.CompressionType, org.apache.hadoop.util.Progressable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map using the named key comparator.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, java.lang.Class, org.apache.hadoop.io.SequenceFile.CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named map using the named key comparator.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getIndexInterval" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of entries that are added before an index entry is added.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setIndexInterval"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="interval" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the index interval.
|
|
@see #getIndexInterval()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setIndexInterval"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="interval" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the index interval and stores it in conf
|
|
@see #getIndexInterval()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close the map.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append a key/value pair to the map. The key must be greater than or equal
|
|
to the previous key added to the map.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Writes a new map.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.MapFile.Writer -->
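<!-- A sketch of MapFile.Writer: keys must be appended in non-decreasing order, and an
     index entry is written every getIndexInterval() entries. The directory path
     "/tmp/demo.map" and the demo class name are illustrative assumptions.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.Text;

public class MapFileWriteDemo {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    MapFile.Writer writer =
        new MapFile.Writer(conf, fs, "/tmp/demo.map", IntWritable.class, Text.class);
    for (int i = 0; i < 100; i++) {                        // ascending keys, as required
      writer.append(new IntWritable(i), new Text("value " + i));
    }
    writer.close();                                        // closes the data and index files
  }
}
-->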
|
|
<!-- start class org.apache.hadoop.io.MapWritable -->
|
|
<class name="MapWritable" extends="org.apache.hadoop.io.AbstractMapWritable"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.util.Map<org.apache.hadoop.io.Writable, org.apache.hadoop.io.Writable>"/>
|
|
<constructor name="MapWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default constructor.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MapWritable" type="org.apache.hadoop.io.MapWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Copy constructor.
|
|
|
|
@param other the map to copy from]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="containsKey" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="containsValue" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="entrySet" return="java.util.Set<java.util.Map.Entry<org.apache.hadoop.io.Writable, org.apache.hadoop.io.Writable>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isEmpty" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="keySet" return="java.util.Set<org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="put" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="value" type="org.apache.hadoop.io.Writable"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="putAll"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="t" type="java.util.Map<? extends org.apache.hadoop.io.Writable, ? extends org.apache.hadoop.io.Writable>"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="size" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="values" return="java.util.Collection<org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Writable Map.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.MapWritable -->
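<!-- A sketch of MapWritable as a Writable java.util.Map: heterogeneous Writable keys and
     values can be stored, and the whole map serialized with write()/readFields(). The demo
     class name is an illustrative assumption.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableDemo {
  public static void main(String[] args) {
    MapWritable map = new MapWritable();
    map.put(new Text("count"), new IntWritable(3));
    map.put(new IntWritable(1), new Text("one"));

    Writable count = map.get(new Text("count"));    // IntWritable(3)
    System.out.println(count + ", size=" + map.size());

    MapWritable copy = new MapWritable(map);        // copy constructor shown above
    System.out.println(copy.containsKey(new IntWritable(1)));  // true
  }
}
-->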
|
|
<!-- start class org.apache.hadoop.io.MD5Hash -->
|
|
<class name="MD5Hash" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable<org.apache.hadoop.io.MD5Hash>"/>
|
|
<constructor name="MD5Hash"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs an MD5Hash.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MD5Hash" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs an MD5Hash from a hex string.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MD5Hash" type="byte[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs an MD5Hash with a specified value.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="org.apache.hadoop.io.MD5Hash"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Constructs, reads and returns an instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="that" type="org.apache.hadoop.io.MD5Hash"/>
|
|
<doc>
|
|
<![CDATA[Copy the contents of another instance into this instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDigest" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the digest bytes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="digest" return="org.apache.hadoop.io.MD5Hash"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="data" type="byte[]"/>
|
|
<doc>
|
|
<![CDATA[Construct a hash value for a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="digest" return="org.apache.hadoop.io.MD5Hash"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a hash value for the content from the InputStream.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="digest" return="org.apache.hadoop.io.MD5Hash"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="data" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Construct a hash value for a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="digest" return="org.apache.hadoop.io.MD5Hash"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Construct a hash value for a String.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="digest" return="org.apache.hadoop.io.MD5Hash"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="org.apache.hadoop.io.UTF8"/>
|
|
<doc>
|
|
<![CDATA[Construct a hash value for a String.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="halfDigest" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a half-sized version of this MD5. Fits in a long.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="quarterDigest" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return a 32-bit digest of the MD5.
|
|
@return the first 4 bytes of the md5]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is an MD5Hash whose digest contains the
|
|
same values.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a hash code value for this object.
|
|
Only uses the first 4 bytes, since md5s are evenly distributed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="that" type="org.apache.hadoop.io.MD5Hash"/>
|
|
<doc>
|
|
<![CDATA[Compares this object with the specified object for order.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a string representation of this object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDigest"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="hex" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets the digest value from a hex string.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="MD5_LEN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A Writable for MD5 hash values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.MD5Hash -->
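<!-- A sketch of MD5Hash: digest() hashes a String (byte[] and InputStream overloads also
     exist above), toString() renders the 16-byte digest as hex, and quarterDigest() gives
     the first 4 bytes as an int. The demo class name is an illustrative assumption.

import org.apache.hadoop.io.MD5Hash;

public class MD5HashDemo {
  public static void main(String[] args) {
    MD5Hash hash = MD5Hash.digest("hello, world");
    System.out.println(hash);                      // 32-character hex string
    System.out.println(hash.quarterDigest());      // 32-bit prefix of the digest

    MD5Hash same = new MD5Hash(hash.toString());   // reconstruct from the hex form
    System.out.println(hash.compareTo(same));      // prints 0
  }
}
-->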
|
|
<!-- start class org.apache.hadoop.io.MD5Hash.Comparator -->
|
|
<class name="MD5Hash.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MD5Hash.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparator optimized for MD5Hash keys.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.MD5Hash.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.MultipleIOException -->
|
|
<class name="MultipleIOException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getExceptions" return="java.util.List<java.io.IOException>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the underlying exceptions]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createIOException" return="java.io.IOException"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="exceptions" type="java.util.List<java.io.IOException>"/>
|
|
<doc>
|
|
<![CDATA[A convenient method to create an {@link IOException}.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Encapsulates a list of {@link IOException}s in a single {@link IOException}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.MultipleIOException -->
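<!-- A minimal sketch of how MultipleIOException.createIOException might be used
     (illustrative only; it assumes the factory returns null for an empty list and
     a single exception unchanged for a singleton list):

  import java.io.Closeable;
  import java.io.IOException;
  import java.util.ArrayList;
  import java.util.List;
  import org.apache.hadoop.io.MultipleIOException;

  public class CloseAll {
    public static void closeAll(Closeable... resources) throws IOException {
      List<IOException> exceptions = new ArrayList<IOException>();
      for (Closeable c : resources) {
        try {
          c.close();
        } catch (IOException e) {
          exceptions.add(e);   // collect every failure instead of failing fast
        }
      }
      // Wrap whatever was collected into one IOException (null if nothing failed).
      IOException result = MultipleIOException.createIOException(exceptions);
      if (result != null) {
        throw result;
      }
    }
  }
-->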
|
|
<!-- start class org.apache.hadoop.io.NullWritable -->
|
|
<class name="NullWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<method name="get" return="org.apache.hadoop.io.NullWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the single instance of this class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Singleton Writable with no data.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.NullWritable -->
|
|
<!-- start class org.apache.hadoop.io.NullWritable.Comparator -->
|
|
<class name="NullWritable.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="NullWritable.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
<doc>
|
|
<![CDATA[Compare the buffers in serialized form.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator "optimized" for NullWritable.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.NullWritable.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.ObjectWritable -->
|
|
<class name="ObjectWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="ObjectWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="ObjectWritable" type="java.lang.Object"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="ObjectWritable" type="java.lang.Class, java.lang.Object"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="get" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the instance, or null if none.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDeclaredClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the declared class of the instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="instance" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Reset the instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeObject"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="instance" type="java.lang.Object"/>
|
|
<param name="declaredClass" type="java.lang.Class"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a {@link Writable}, {@link String}, primitive type, or an array of
|
|
the preceding.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readObject" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a {@link Writable}, {@link String}, primitive type, or an array of
|
|
the preceding.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readObject" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<param name="objectWritable" type="org.apache.hadoop.io.ObjectWritable"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a {@link Writable}, {@link String}, primitive type, or an array of
|
|
the preceding.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A polymorphic Writable that writes an instance with its class name.
|
|
Handles arrays, strings and primitive types without a Writable wrapper.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.ObjectWritable -->
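<!-- A minimal round-trip sketch for ObjectWritable.writeObject/readObject
     (illustrative only; DataInputBuffer and DataOutputBuffer are the in-memory
     buffers from this same package):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.DataInputBuffer;
  import org.apache.hadoop.io.DataOutputBuffer;
  import org.apache.hadoop.io.ObjectWritable;

  public class ObjectWritableRoundTrip {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();

      // Serialize a String along with its class name.
      DataOutputBuffer out = new DataOutputBuffer();
      ObjectWritable.writeObject(out, "forty-two", String.class, conf);

      // Deserialize it back from the same bytes.
      DataInputBuffer in = new DataInputBuffer();
      in.reset(out.getData(), out.getLength());
      Object value = ObjectWritable.readObject(in, conf);
      System.out.println(value); // forty-two
    }
  }
-->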
|
|
<!-- start class org.apache.hadoop.io.OutputBuffer -->
|
|
<class name="OutputBuffer" extends="java.io.FilterOutputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="OutputBuffer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a new empty buffer.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getData" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the current contents of the buffer.
|
|
Data is only valid to {@link #getLength()}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the length of the valid data currently in the buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset" return="org.apache.hadoop.io.OutputBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resets the buffer to empty.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes bytes from an InputStream directly into the buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A reusable {@link OutputStream} implementation that writes to an in-memory
|
|
buffer.
|
|
|
|
<p>This saves memory over creating a new OutputStream and
|
|
ByteArrayOutputStream each time data is written.
|
|
|
|
<p>Typical usage is something like the following:<pre>
|
|
|
|
OutputBuffer buffer = new OutputBuffer();
|
|
while (... loop condition ...) {
|
|
buffer.reset();
|
|
... write buffer using OutputStream methods ...
|
|
byte[] data = buffer.getData();
|
|
int dataLength = buffer.getLength();
|
|
... write data to its ultimate destination ...
|
|
}
|
|
</pre>
|
|
@see DataOutputBuffer
|
|
@see InputBuffer]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.OutputBuffer -->
|
|
<!-- start interface org.apache.hadoop.io.RawComparator -->
|
|
<interface name="RawComparator" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.util.Comparator<T>"/>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A {@link Comparator} that operates directly on byte representations of
|
|
objects.
|
|
</p>
|
|
@param <T>
|
|
@see DeserializerComparator]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.RawComparator -->
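<!-- A minimal sketch of a RawComparator for keys whose serialized form is a single
     4-byte big-endian int (illustrative only; WritableComparator.readInt is assumed
     to be the helper that reads such an int from a byte array):

  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.RawComparator;
  import org.apache.hadoop.io.WritableComparator;

  public class IntRawComparator implements RawComparator<IntWritable> {
    // Compare directly on the serialized bytes, avoiding deserialization.
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      int left = WritableComparator.readInt(b1, s1);
      int right = WritableComparator.readInt(b2, s2);
      return left < right ? -1 : (left == right ? 0 : 1);
    }

    // Object-level comparison falls back to the deserialized values.
    public int compare(IntWritable a, IntWritable b) {
      return a.compareTo(b);
    }
  }
-->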
|
|
<!-- start class org.apache.hadoop.io.SequenceFile -->
|
|
<class name="SequenceFile" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getCompressionType" return="org.apache.hadoop.io.SequenceFile.CompressionType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link org.apache.hadoop.mapred.SequenceFileOutputFormat#getOutputCompressionType(org.apache.hadoop.mapred.JobConf)}
|
|
to get {@link CompressionType} for job-outputs.">
|
|
<param name="job" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the compression type for the reduce outputs
|
|
@param job the job config to look in
|
|
@return the kind of compression to use
|
|
@deprecated Use
|
|
{@link org.apache.hadoop.mapred.SequenceFileOutputFormat#getOutputCompressionType(org.apache.hadoop.mapred.JobConf)}
|
|
to get {@link CompressionType} for job-outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCompressionType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use the one of the many SequenceFile.createWriter methods to specify
|
|
the {@link CompressionType} while creating the {@link SequenceFile} or
|
|
{@link org.apache.hadoop.mapred.SequenceFileOutputFormat#setOutputCompressionType(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.io.SequenceFile.CompressionType)}
|
|
to specify the {@link CompressionType} for job-outputs.
|
|
or">
|
|
<param name="job" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="val" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<doc>
|
|
<![CDATA[Set the compression type for sequence files.
|
|
@param job the configuration to modify
|
|
@param val the new compression type (none, block, record)
|
|
@deprecated Use one of the many SequenceFile.createWriter methods to specify
|
|
the {@link CompressionType} while creating the {@link SequenceFile} or
|
|
{@link org.apache.hadoop.mapred.SequenceFileOutputFormat#setOutputCompressionType(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.io.SequenceFile.CompressionType)}
|
|
to specify the {@link CompressionType} for job-outputs.
|
|
]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="name" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of SequenceFile Writer.
|
|
@param fs The configured filesystem.
|
|
@param conf The configuration.
|
|
@param name The name of the file.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="name" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of SequenceFile Writer.
|
|
@param fs The configured filesystem.
|
|
@param conf The configuration.
|
|
@param name The name of the file.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param compressionType The compression type.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="name" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of SequenceFile Writer.
|
|
@param fs The configured filesystem.
|
|
@param conf The configuration.
|
|
@param name The name of the file.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param compressionType The compression type.
|
|
@param progress The Progressable object to track progress.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="name" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of SequenceFile Writer.
|
|
@param fs The configured filesystem.
|
|
@param conf The configuration.
|
|
@param name The name of the file.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param compressionType The compression type.
|
|
@param codec The compression codec.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="name" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<param name="metadata" type="org.apache.hadoop.io.SequenceFile.Metadata"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of SequenceFile Writer.
|
|
@param fs The configured filesystem.
|
|
@param conf The configuration.
|
|
@param name The name of the file.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param compressionType The compression type.
|
|
@param codec The compression codec.
|
|
@param progress The Progressable object to track progress.
|
|
@param metadata The metadata of the file.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="name" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="replication" type="short"/>
|
|
<param name="blockSize" type="long"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<param name="metadata" type="org.apache.hadoop.io.SequenceFile.Metadata"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of SequenceFile Writer.
|
|
@param fs The configured filesystem.
|
|
@param conf The configuration.
|
|
@param name The name of the file.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param bufferSize buffer size for the underlying output stream.
|
|
@param replication replication factor for the file.
|
|
@param blockSize block size for the file.
|
|
@param compressionType The compression type.
|
|
@param codec The compression codec.
|
|
@param progress The Progressable object to track progress.
|
|
@param metadata The metadata of the file.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="name" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of SequenceFile Writer.
|
|
@param fs The configured filesystem.
|
|
@param conf The configuration.
|
|
@param name The name of the file.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param compressionType The compression type.
|
|
@param codec The compression codec.
|
|
@param progress The Progressable object to track progress.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="out" type="org.apache.hadoop.fs.FSDataOutputStream"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<param name="metadata" type="org.apache.hadoop.io.SequenceFile.Metadata"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of 'raw' SequenceFile Writer.
|
|
@param conf The configuration.
|
|
@param out The stream on top which the writer is to be constructed.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param compressionType The compression type.
|
|
@param codec The compression codec.
|
|
@param metadata The metadata of the file.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createWriter" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="out" type="org.apache.hadoop.fs.FSDataOutputStream"/>
|
|
<param name="keyClass" type="java.lang.Class"/>
|
|
<param name="valClass" type="java.lang.Class"/>
|
|
<param name="compressionType" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct the preferred type of 'raw' SequenceFile Writer.
|
|
@param conf The configuration.
|
|
@param out The stream on top which the writer is to be constructed.
|
|
@param keyClass The 'key' type.
|
|
@param valClass The 'value' type.
|
|
@param compressionType The compression type.
|
|
@param codec The compression codec.
|
|
@return Returns the handle to the constructed SequenceFile Writer.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<field name="SYNC_INTERVAL" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of bytes between sync points.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[<code>SequenceFile</code>s are flat files consisting of binary key/value
|
|
pairs.
|
|
|
|
<p><code>SequenceFile</code> provides {@link Writer}, {@link Reader} and
|
|
{@link Sorter} classes for writing, reading and sorting respectively.</p>
|
|
|
|
There are three <code>SequenceFile</code> <code>Writer</code>s based on the
|
|
{@link CompressionType} used to compress key/value pairs:
|
|
<ol>
|
|
<li>
|
|
<code>Writer</code> : Uncompressed records.
|
|
</li>
|
|
<li>
|
|
<code>RecordCompressWriter</code> : Record-compressed files, only compress
|
|
values.
|
|
</li>
|
|
<li>
|
|
<code>BlockCompressWriter</code> : Block-compressed files, both keys &
|
|
values are collected in 'blocks'
|
|
separately and compressed. The size of
|
|
the 'block' is configurable.
|
|
</ol>
|
|
|
|
<p>The actual compression algorithm used to compress key and/or values can be
|
|
specified by using the appropriate {@link CompressionCodec}.</p>
|
|
|
|
<p>The recommended way is to use the static <tt>createWriter</tt> methods
|
|
provided by the <code>SequenceFile</code> to choose the preferred format.</p>
|
|
|
|
<p>The {@link Reader} acts as the bridge and can read any of the above
|
|
<code>SequenceFile</code> formats.</p>
|
|
|
|
<h4 id="Formats">SequenceFile Formats</h4>
|
|
|
|
<p>Essentially there are 3 different formats for <code>SequenceFile</code>s
|
|
depending on the <code>CompressionType</code> specified. All of them share a
|
|
<a href="#Header">common header</a> described below.
|
|
|
|
<h5 id="Header">SequenceFile Header</h5>
|
|
<ul>
|
|
<li>
|
|
version - 3 bytes of magic header <b>SEQ</b>, followed by 1 byte of actual
|
|
version number (e.g. SEQ4 or SEQ6)
|
|
</li>
|
|
<li>
|
|
keyClassName - key class
|
|
</li>
|
|
<li>
|
|
valueClassName - value class
|
|
</li>
|
|
<li>
|
|
compression - A boolean which specifies if compression is turned on for
|
|
keys/values in this file.
|
|
</li>
|
|
<li>
|
|
blockCompression - A boolean which specifies if block-compression is
|
|
turned on for keys/values in this file.
|
|
</li>
|
|
<li>
|
|
compression codec - <code>CompressionCodec</code> class which is used for
|
|
compression of keys and/or values (if compression is
|
|
enabled).
|
|
</li>
|
|
<li>
|
|
metadata - {@link Metadata} for this file.
|
|
</li>
|
|
<li>
|
|
sync - A sync marker to denote end of the header.
|
|
</li>
|
|
</ul>
|
|
|
|
<h5 id="#UncompressedFormat">Uncompressed SequenceFile Format</h5>
|
|
<ul>
|
|
<li>
|
|
<a href="#Header">Header</a>
|
|
</li>
|
|
<li>
|
|
Record
|
|
<ul>
|
|
<li>Record length</li>
|
|
<li>Key length</li>
|
|
<li>Key</li>
|
|
<li>Value</li>
|
|
</ul>
|
|
</li>
|
|
<li>
|
|
A sync-marker every few <code>100</code> bytes or so.
|
|
</li>
|
|
</ul>
|
|
|
|
<h5 id="#RecordCompressedFormat">Record-Compressed SequenceFile Format</h5>
|
|
<ul>
|
|
<li>
|
|
<a href="#Header">Header</a>
|
|
</li>
|
|
<li>
|
|
Record
|
|
<ul>
|
|
<li>Record length</li>
|
|
<li>Key length</li>
|
|
<li>Key</li>
|
|
<li><i>Compressed</i> Value</li>
|
|
</ul>
|
|
</li>
|
|
<li>
|
|
A sync-marker every few <code>100</code> bytes or so.
|
|
</li>
|
|
</ul>
|
|
|
|
<h5 id="#BlockCompressedFormat">Block-Compressed SequenceFile Format</h5>
|
|
<ul>
|
|
<li>
|
|
<a href="#Header">Header</a>
|
|
</li>
|
|
<li>
|
|
Record <i>Block</i>
|
|
<ul>
|
|
<li>Compressed key-lengths block-size</li>
|
|
<li>Compressed key-lengths block</li>
|
|
<li>Compressed keys block-size</li>
|
|
<li>Compressed keys block</li>
|
|
<li>Compressed value-lengths block-size</li>
|
|
<li>Compressed value-lengths block</li>
|
|
<li>Compressed values block-size</li>
|
|
<li>Compressed values block</li>
|
|
</ul>
|
|
</li>
|
|
<li>
|
|
A sync-marker every few <code>100</code> bytes or so.
|
|
</li>
|
|
</ul>
|
|
|
|
<p>The compressed blocks of key lengths and value lengths consist of the
|
|
actual lengths of individual keys/values encoded in ZeroCompressedInteger
|
|
format.</p>
|
|
|
|
@see CompressionCodec]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SequenceFile -->
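<!-- A minimal write-side sketch for SequenceFile (illustrative only; Writer.append
     and Writer.close are assumed from the Writer class referenced above, and
     CompressionType.RECORD is assumed to be one of the enum constants):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.Text;

  public class SeqFileWrite {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);
      Path file = new Path("example.seq");

      // Record-compressed writer; keys are Text, values are IntWritable.
      SequenceFile.Writer writer = SequenceFile.createWriter(
          fs, conf, file, Text.class, IntWritable.class,
          SequenceFile.CompressionType.RECORD);
      try {
        for (int i = 0; i < 100; i++) {
          writer.append(new Text("key" + i), new IntWritable(i));
        }
      } finally {
        writer.close();
      }
    }
  }
-->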
|
|
<!-- start class org.apache.hadoop.io.SequenceFile.CompressionType -->
|
|
<class name="SequenceFile.CompressionType" extends="java.lang.Enum<org.apache.hadoop.io.SequenceFile.CompressionType>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.io.SequenceFile.CompressionType[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.io.SequenceFile.CompressionType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The compression type used to compress key/value pairs in the
|
|
{@link SequenceFile}.
|
|
|
|
@see SequenceFile.Writer]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SequenceFile.CompressionType -->
|
|
<!-- start class org.apache.hadoop.io.SequenceFile.Metadata -->
|
|
<class name="SequenceFile.Metadata" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="SequenceFile.Metadata"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="SequenceFile.Metadata" type="java.util.TreeMap<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="get" return="org.apache.hadoop.io.Text"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="org.apache.hadoop.io.Text"/>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="org.apache.hadoop.io.Text"/>
|
|
<param name="value" type="org.apache.hadoop.io.Text"/>
|
|
</method>
|
|
<method name="getMetadata" return="java.util.TreeMap<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.io.SequenceFile.Metadata"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The class encapsulating the metadata of a file.
|
|
The metadata of a file is a list of attribute name/value
|
|
pairs of Text type.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SequenceFile.Metadata -->
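<!-- A minimal sketch of building and querying SequenceFile.Metadata (illustrative
     only; it uses just the no-arg constructor, set, and get from the listing above
     and assumes get returns null when the name is absent):

  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.Text;

  public class MetadataExample {
    public static void main(String[] args) {
      SequenceFile.Metadata meta = new SequenceFile.Metadata();
      meta.set(new Text("created.by"), new Text("example-job"));
      meta.set(new Text("schema.version"), new Text("1"));

      // Values come back as Text, or null when the name was never set.
      Text creator = meta.get(new Text("created.by"));
      System.out.println(creator);
    }
  }
-->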
|
|
<!-- start class org.apache.hadoop.io.SequenceFile.Reader -->
|
|
<class name="SequenceFile.Reader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.io.Closeable"/>
|
|
<constructor name="SequenceFile.Reader" type="org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Open the named file.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="openFile" return="org.apache.hadoop.fs.FSDataInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="bufferSize" type="int"/>
|
|
<param name="length" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Override this method to specialize the type of
|
|
{@link FSDataInputStream} returned.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeyClassName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the name of the key class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeyClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the class of keys in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValueClassName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the name of the value class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValueClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the class of values in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isCompressed" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if values are compressed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isBlockCompressed" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if records are block-compressed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCompressionCodec" return="org.apache.hadoop.io.compress.CompressionCodec"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the compression codec of data in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMetadata" return="org.apache.hadoop.io.SequenceFile.Metadata"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the metadata object of the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCurrentValue"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the 'value' corresponding to the last read 'key'.
|
|
@param val : The 'value' to be read.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCurrentValue" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the 'value' corresponding to the last read 'key'.
|
|
@param val : The 'value' to be read.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the next key in the file into <code>key</code>, skipping its
|
|
value. True if another entry exists, and false at end of file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the next key/value pair in the file into <code>key</code> and
|
|
<code>val</code>. Returns true if such a pair exists and false when at
|
|
end of file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Call {@link #nextRaw(DataOutputBuffer,SequenceFile.ValueBytes)}.">
|
|
<param name="buffer" type="org.apache.hadoop.io.DataOutputBuffer"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Call {@link #nextRaw(DataOutputBuffer,SequenceFile.ValueBytes)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createValueBytes" return="org.apache.hadoop.io.SequenceFile.ValueBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="nextRaw" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.DataOutputBuffer"/>
|
|
<param name="val" type="org.apache.hadoop.io.SequenceFile.ValueBytes"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read 'raw' records.
|
|
@param key - The buffer into which the key is read
|
|
@param val - The 'raw' value
|
|
@return Returns the total record length or -1 for end of file
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="nextRawKey" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.DataOutputBuffer"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read 'raw' keys.
|
|
@param key - The buffer into which the key is read
|
|
@return Returns the key length or -1 for end of file
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the next key in the file, skipping its
|
|
value. Return null at end of file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="nextRawValue" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="org.apache.hadoop.io.SequenceFile.ValueBytes"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read 'raw' values.
|
|
@param val - The 'raw' value
|
|
@return Returns the value length
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set the current byte position in the input file.
|
|
|
|
<p>The position passed must be a position returned by {@link
|
|
SequenceFile.Writer#getLength()} when writing this file. To seek to an arbitrary
|
|
position, use {@link SequenceFile.Reader#sync(long)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sync"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Seek to the next sync mark past a given position.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="syncSeen" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true iff the previous call to next passed a sync mark.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPosition" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the current byte position in the input file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the name of the file.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Reads key/value pairs from a sequence-format file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SequenceFile.Reader -->
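<!-- A minimal read-side sketch for SequenceFile.Reader (illustrative only; the key
     and value types are assumed to match what the file was written with):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.Text;

  public class SeqFileRead {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);
      SequenceFile.Reader reader =
          new SequenceFile.Reader(fs, new Path("example.seq"), conf);
      try {
        Text key = new Text();
        IntWritable value = new IntWritable();
        // next(key, value) fills both objects and returns false at end of file.
        while (reader.next(key, value)) {
          System.out.println(key + "\t" + value);
        }
      } finally {
        reader.close();
      }
    }
  }
-->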
|
|
<!-- start class org.apache.hadoop.io.SequenceFile.Sorter -->
|
|
<class name="SequenceFile.Sorter" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFile.Sorter" type="org.apache.hadoop.fs.FileSystem, java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>, java.lang.Class, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Sort and merge files containing the named classes.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SequenceFile.Sorter" type="org.apache.hadoop.fs.FileSystem, org.apache.hadoop.io.RawComparator, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Sort and merge using an arbitrary {@link RawComparator}.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setFactor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="factor" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the number of streams to merge at once.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFactor" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the number of streams to merge at once.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMemory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="memory" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the total amount of buffer memory, in bytes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMemory" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the total amount of buffer memory, in bytes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setProgressable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="progressable" type="org.apache.hadoop.util.Progressable"/>
|
|
<doc>
|
|
<![CDATA[Set the progressable object in order to report progress.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inFiles" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="outFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteInput" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Perform a file sort from a set of input files into an output file.
|
|
@param inFiles the files to be sorted
|
|
@param outFile the sorted output file
|
|
@param deleteInput should the input files be deleted as they are read?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sortAndIterate" return="org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inFiles" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="tempDir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteInput" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Perform a file sort from a set of input files and return an iterator.
|
|
@param inFiles the files to be sorted
|
|
@param tempDir the directory where temp files are created during sort
|
|
@param deleteInput should the input files be deleted as they are read?
|
|
@return iterator the RawKeyValueIterator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="outFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The backwards compatible interface to sort.
|
|
@param inFile the input file to sort
|
|
@param outFile the sorted output file]]>
|
|
</doc>
|
|
</method>
|
|
<method name="merge" return="org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="segments" type="java.util.List<org.apache.hadoop.io.SequenceFile.Sorter.SegmentDescriptor>"/>
|
|
<param name="tmpDir" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Merges the list of segments of type <code>SegmentDescriptor</code>
|
|
@param segments the list of SegmentDescriptors
|
|
@param tmpDir the directory to write temporary files into
|
|
@return RawKeyValueIterator
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="merge" return="org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inNames" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="deleteInputs" type="boolean"/>
|
|
<param name="tmpDir" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Merges the contents of files passed in Path[] using the merge factor value
|
|
that has already been set.
|
|
@param inNames the array of path names
|
|
@param deleteInputs true if the input files should be deleted when
|
|
unnecessary
|
|
@param tmpDir the directory to write temporary files into
|
|
@return RawKeyValueIterator
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="merge" return="org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inNames" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="deleteInputs" type="boolean"/>
|
|
<param name="factor" type="int"/>
|
|
<param name="tmpDir" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Merges the contents of files passed in Path[]
|
|
@param inNames the array of path names
|
|
@param deleteInputs true if the input files should be deleted when
|
|
unnecessary
|
|
@param factor the factor that will be used as the maximum merge fan-in
|
|
@param tmpDir the directory to write temporary files into
|
|
@return RawKeyValueIterator
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="merge" return="org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inNames" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="tempDir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="deleteInputs" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Merges the contents of files passed in Path[]
|
|
@param inNames the array of path names
|
|
@param tempDir the directory for creating temp files during merge
|
|
@param deleteInputs true if the input files should be deleted when
|
|
unnecessary
|
|
@return RawKeyValueIterator
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="cloneFileAttributes" return="org.apache.hadoop.io.SequenceFile.Writer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="outputFile" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="prog" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Clones the attributes (like compression) of the input file and creates a
|
|
corresponding Writer.
|
|
@param inputFile the path of the input file whose attributes should be
|
|
cloned
|
|
@param outputFile the path of the output file
|
|
@param prog the Progressable to report status during the file write
|
|
@return Writer
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="records" type="org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator"/>
|
|
<param name="writer" type="org.apache.hadoop.io.SequenceFile.Writer"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes records from RawKeyValueIterator into a file represented by the
|
|
passed writer
|
|
@param records the RawKeyValueIterator
|
|
@param writer the Writer created earlier
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="merge"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inFiles" type="org.apache.hadoop.fs.Path[]"/>
|
|
<param name="outFile" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Merge the provided files.
|
|
@param inFiles the array of input path names
|
|
@param outFile the final output file
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Sorts key/value pairs in a sequence-format file.
|
|
|
|
<p>For best performance, applications should make sure that the {@link
|
|
Writable#readFields(DataInput)} implementation of their keys is
|
|
very efficient. In particular, it should avoid allocating memory.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SequenceFile.Sorter -->
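<!-- A minimal sketch of sorting sequence files with SequenceFile.Sorter
     (illustrative only; the input files are assumed to already exist and to use
     Text keys and IntWritable values):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.Text;

  public class SeqFileSort {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);

      SequenceFile.Sorter sorter =
          new SequenceFile.Sorter(fs, Text.class, IntWritable.class, conf);
      sorter.setFactor(10);               // merge at most 10 streams at once
      sorter.setMemory(4 * 1024 * 1024);  // 4 MB of sort buffer

      Path[] inputs = { new Path("part-0.seq"), new Path("part-1.seq") };
      Path output = new Path("sorted.seq");
      // Sort the inputs into a single output file, keeping the input files.
      sorter.sort(inputs, output, false);
    }
  }
-->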
|
|
<!-- start interface org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator -->
|
|
<interface name="SequenceFile.Sorter.RawKeyValueIterator" abstract="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getKey" return="org.apache.hadoop.io.DataOutputBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Gets the current raw key
|
|
@return DataOutputBuffer
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValue" return="org.apache.hadoop.io.SequenceFile.ValueBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Gets the current raw value
|
|
@return ValueBytes
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Sets up the current key and value (for getKey and getValue)
|
|
@return true if there exists a key/value, false otherwise
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Closes the iterator so that the underlying streams can be closed
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="org.apache.hadoop.util.Progress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Gets the Progress object; this has a float (0.0 - 1.0)
|
|
indicating the fraction of the bytes processed by the iterator so far]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The interface to iterate over raw keys/values of SequenceFiles.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator -->
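<!-- Usage sketch (not part of the generated API record): consuming a
     RawKeyValueIterator produced by Sorter.merge and copying the raw records
     into a new file via appendRaw, without deserializing keys or values.
     The path names, the temp directory, and the use of a null Progressable are
     illustrative assumptions; "sorter" is a SequenceFile.Sorter built as in the
     previous sketch.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.SequenceFile;

public class RawIterExample {
  static void copyMerged(SequenceFile.Sorter sorter, Path[] inputs) throws Exception {
    SequenceFile.Sorter.RawKeyValueIterator it =
        sorter.merge(inputs, new Path("merge-tmp"), false /* keep the inputs */);
    // Reuse the first input file's compression settings for the output.
    SequenceFile.Writer out =
        sorter.cloneFileAttributes(inputs[0], new Path("merged.seq"), null);
    while (it.next()) {                          // advance to the next record
      DataOutputBuffer rawKey = it.getKey();     // serialized key bytes
      SequenceFile.ValueBytes rawVal = it.getValue();
      out.appendRaw(rawKey.getData(), 0, rawKey.getLength(), rawVal);
    }
    // sorter.writeFile(it, out) would perform the same copy in one call.
    it.close();
    out.close();
  }
}
-->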
|
|
<!-- start class org.apache.hadoop.io.SequenceFile.Sorter.SegmentDescriptor -->
|
|
<class name="SequenceFile.Sorter.SegmentDescriptor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Comparable"/>
|
|
<constructor name="SequenceFile.Sorter.SegmentDescriptor" type="long, long, org.apache.hadoop.fs.Path"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a segment
|
|
@param segmentOffset the offset of the segment in the file
|
|
@param segmentLength the length of the segment
|
|
@param segmentPathName the path name of the file containing the segment]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="doSync"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Do the sync checks]]>
|
|
</doc>
|
|
</method>
|
|
<method name="preserveInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="preserve" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Sets whether the input file should be preserved (i.e. not deleted) when it is no longer needed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="shouldPreserveInput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="nextRawKey" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Fills up the rawKey object with the key returned by the Reader
|
|
@return true if there is a key returned; false, otherwise
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="nextRawValue" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rawValue" type="org.apache.hadoop.io.SequenceFile.ValueBytes"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Fills up the passed rawValue with the value corresponding to the key
|
|
read earlier
|
|
@param rawValue the ValueBytes object to fill with the value
|
|
@return the length of the value
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKey" return="org.apache.hadoop.io.DataOutputBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the stored rawKey]]>
|
|
</doc>
|
|
</method>
|
|
<method name="cleanup"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The default cleanup. Subclasses can override this with a custom
|
|
cleanup]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class defines a merge segment. This class can be subclassed to
|
|
provide a customized cleanup method implementation. In this
|
|
implementation, cleanup closes the file handle and deletes the file]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SequenceFile.Sorter.SegmentDescriptor -->
|
|
<!-- start interface org.apache.hadoop.io.SequenceFile.ValueBytes -->
|
|
<interface name="SequenceFile.ValueBytes" abstract="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="writeUncompressedBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="outStream" type="java.io.DataOutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes the uncompressed bytes to the outStream.
|
|
@param outStream : Stream to write uncompressed bytes into.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeCompressedBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="outStream" type="java.io.DataOutputStream"/>
|
|
<exception name="IllegalArgumentException" type="java.lang.IllegalArgumentException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write compressed bytes to outStream.
|
|
Note that it will NOT compress the bytes if they are not already compressed.
|
|
@param outStream : Stream to write compressed bytes into.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Size of stored data.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The interface to 'raw' values of SequenceFiles.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.SequenceFile.ValueBytes -->
|
|
<!-- start class org.apache.hadoop.io.SequenceFile.Writer -->
|
|
<class name="SequenceFile.Writer" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.io.Closeable"/>
|
|
<constructor name="SequenceFile.Writer" type="org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named file.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SequenceFile.Writer" type="org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile.Metadata"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named file with write-progress reporter.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SequenceFile.Writer" type="org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile.Metadata"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named file with write-progress reporter.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getKeyClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the class of keys in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValueClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the class of values in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCompressionCodec" return="org.apache.hadoop.io.compress.CompressionCodec"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the compression codec of data in this file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sync"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a sync point.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close the file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="val" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append a key/value pair.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<param name="val" type="java.lang.Object"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append a key/value pair.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="appendRaw"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="keyData" type="byte[]"/>
|
|
<param name="keyOffset" type="int"/>
|
|
<param name="keyLength" type="int"/>
|
|
<param name="val" type="org.apache.hadoop.io.SequenceFile.ValueBytes"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns the current length of the output file.
|
|
|
|
<p>This always returns a synchronized position. In other words,
|
|
immediately after calling {@link SequenceFile.Reader#seek(long)} with a position
|
|
returned by this method, {@link SequenceFile.Reader#next(Writable)} may be called. However
|
|
the key may be earlier in the file than the key last written when this
|
|
method was called (e.g., with block-compression, it may be the first key
|
|
in the block that was being written when this method was called).]]>
|
|
</doc>
|
|
</method>
|
|
<field name="keySerializer" type="org.apache.hadoop.io.serializer.Serializer"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="uncompressedValSerializer" type="org.apache.hadoop.io.serializer.Serializer"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="compressedValSerializer" type="org.apache.hadoop.io.serializer.Serializer"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Write key/value pairs to a sequence-format file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SequenceFile.Writer -->
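<!-- Usage sketch (not part of the generated API record): writing key/value
     pairs with SequenceFile.Writer. The path and the Text/IntWritable
     key/value classes are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class WriterExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    SequenceFile.Writer writer = new SequenceFile.Writer(
        fs, conf, new Path("counts.seq"), Text.class, IntWritable.class);
    try {
      writer.append(new Text("apple"), new IntWritable(3));
      writer.append(new Text("banana"), new IntWritable(7));
      writer.sync();                 // add a sync point readers can seek to
      long pos = writer.getLength(); // a position safe to pass to Reader.seek
      System.out.println("sync position: " + pos);
    } finally {
      writer.close();
    }
  }
}
-->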
|
|
<!-- start class org.apache.hadoop.io.SetFile -->
|
|
<class name="SetFile" extends="org.apache.hadoop.io.MapFile"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SetFile"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[A file-based set of keys.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SetFile -->
|
|
<!-- start class org.apache.hadoop.io.SetFile.Reader -->
|
|
<class name="SetFile.Reader" extends="org.apache.hadoop.io.MapFile.Reader"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SetFile.Reader" type="org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a set reader for the named set.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SetFile.Reader" type="org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a set reader for the named set using the named comparator.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="seek" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the next key in a set into <code>key</code>. Returns
|
|
true if such a key exists and false when at the end of the set.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the matching key from a set into <code>key</code>.
|
|
Returns <code>key</code>, or null if no match exists.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Provide access to an existing set file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SetFile.Reader -->
|
|
<!-- start class org.apache.hadoop.io.SetFile.Writer -->
|
|
<class name="SetFile.Writer" extends="org.apache.hadoop.io.MapFile.Writer"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SetFile.Writer" type="org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="pass a Configuration too">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create the named set for keys of the named class.
|
|
@deprecated pass a Configuration too]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SetFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>, org.apache.hadoop.io.SequenceFile.CompressionType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a set naming the element class and compression type.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SetFile.Writer" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, java.lang.String, org.apache.hadoop.io.WritableComparator, org.apache.hadoop.io.SequenceFile.CompressionType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a set naming the element comparator and compression type.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Append a key to a set. The key must be strictly greater than the
|
|
previous key added to the set.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Write a new set file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SetFile.Writer -->
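<!-- Usage sketch (not part of the generated API record): building a SetFile of
     Text keys and probing it with SetFile.Reader. The directory name is an
     illustrative assumption; keys must be appended in strictly increasing order.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SetFile;
import org.apache.hadoop.io.Text;

public class SetFileExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    String dir = "words.set";

    SetFile.Writer writer = new SetFile.Writer(
        conf, fs, dir, Text.class, SequenceFile.CompressionType.NONE);
    writer.append(new Text("apple"));   // keys in strictly increasing order
    writer.append(new Text("banana"));
    writer.close();

    SetFile.Reader reader = new SetFile.Reader(fs, dir, conf);
    boolean present = reader.seek(new Text("banana"));  // true if the key exists
    System.out.println("banana present: " + present);
    reader.close();
  }
}
-->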
|
|
<!-- start class org.apache.hadoop.io.SortedMapWritable -->
|
|
<class name="SortedMapWritable" extends="org.apache.hadoop.io.AbstractMapWritable"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.util.SortedMap<org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable>"/>
|
|
<constructor name="SortedMapWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[default constructor.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SortedMapWritable" type="org.apache.hadoop.io.SortedMapWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Copy constructor.
|
|
|
|
@param other the map to copy from]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="comparator" return="java.util.Comparator<? super org.apache.hadoop.io.WritableComparable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="firstKey" return="org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="headMap" return="java.util.SortedMap<org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="toKey" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="lastKey" return="org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="subMap" return="java.util.SortedMap<org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fromKey" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="toKey" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="tailMap" return="java.util.SortedMap<org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fromKey" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="containsKey" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="containsValue" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="entrySet" return="java.util.Set<java.util.Map.Entry<org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isEmpty" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="keySet" return="java.util.Set<org.apache.hadoop.io.WritableComparable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="put" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="org.apache.hadoop.io.Writable"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="putAll"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="t" type="java.util.Map<? extends org.apache.hadoop.io.WritableComparable, ? extends org.apache.hadoop.io.Writable>"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="size" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="values" return="java.util.Collection<org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Writable SortedMap.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.SortedMapWritable -->
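<!-- Usage sketch (not part of the generated API record): populating a
     SortedMapWritable and serializing it through the Writable contract.
     The DataOutputBuffer/DataInputBuffer round trip is only an illustrative
     way to exercise write() and readFields().

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SortedMapWritable;
import org.apache.hadoop.io.Text;

public class SortedMapExample {
  public static void main(String[] args) throws Exception {
    SortedMapWritable map = new SortedMapWritable();
    map.put(new Text("banana"), new IntWritable(7));
    map.put(new Text("apple"), new IntWritable(3));
    System.out.println(map.firstKey());   // "apple": keys are kept sorted

    // Round trip through write() and readFields().
    DataOutputBuffer out = new DataOutputBuffer();
    map.write(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    SortedMapWritable copy = new SortedMapWritable();
    copy.readFields(in);
    System.out.println(copy.size());      // 2
  }
}
-->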
|
|
<!-- start interface org.apache.hadoop.io.Stringifier -->
|
|
<interface name="Stringifier" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.io.Closeable"/>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="obj" type="T"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Converts the object to a string representation
|
|
@param obj the object to convert
|
|
@return the string representation of the object
|
|
@throws IOException if the object cannot be converted]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fromString" return="T"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Restores the object from its string representation.
|
|
@param str the string representation of the object
|
|
@return restored object
|
|
@throws IOException if the object cannot be restored]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Closes this object.
|
|
@throws IOException if an I/O error occurs]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Stringifier interface offers two methods to convert an object
|
|
to a string representation and restore the object given its
|
|
string representation.
|
|
@param <T> the class of the objects to stringify]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.Stringifier -->
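<!-- Implementation sketch (not part of the generated API record): a minimal
     Stringifier for IntWritable that assumes a plain decimal text form. Hadoop
     also ships concrete implementations (e.g. DefaultStringifier); this sketch
     only illustrates the three methods of the interface.

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Stringifier;

public class IntWritableStringifier implements Stringifier<IntWritable> {
  public String toString(IntWritable obj) throws IOException {
    return Integer.toString(obj.get());            // decimal text form
  }
  public IntWritable fromString(String str) throws IOException {
    try {
      return new IntWritable(Integer.parseInt(str));
    } catch (NumberFormatException e) {
      throw new IOException("Cannot restore IntWritable from: " + str);
    }
  }
  public void close() throws IOException {
    // no resources to release
  }
}
-->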
|
|
<!-- start class org.apache.hadoop.io.Text -->
|
|
<class name="Text" extends="org.apache.hadoop.io.BinaryComparable"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable<org.apache.hadoop.io.BinaryComparable>"/>
|
|
<constructor name="Text"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Text" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct from a string.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Text" type="org.apache.hadoop.io.Text"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct from another text.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Text" type="byte[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct from a byte array.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getBytes" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the raw bytes; however, only data up to {@link #getLength()} is
|
|
valid.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the number of bytes in the byte array]]>
|
|
</doc>
|
|
</method>
|
|
<method name="charAt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="position" type="int"/>
|
|
<doc>
|
|
<![CDATA[Returns the Unicode Scalar Value (32-bit integer value)
|
|
for the character at <code>position</code>. Note that this
|
|
method avoids using the converter or doing String instantiation.
|
|
@return the Unicode scalar value at position or -1
|
|
if the position is invalid or points to a
|
|
trailing byte]]>
|
|
</doc>
|
|
</method>
|
|
<method name="find" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="what" type="java.lang.String"/>
|
|
</method>
|
|
<method name="find" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="what" type="java.lang.String"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Finds any occurrence of <code>what</code> in the backing
|
|
buffer, starting at position <code>start</code>. The starting
|
|
position is measured in bytes and the return value is in
|
|
terms of byte position in the buffer. The backing buffer is
|
|
not converted to a string for this operation.
|
|
@return byte position of the first occurrence of the search
|
|
string in the UTF-8 buffer or -1 if not found]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set to contain the contents of a string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<doc>
|
|
<![CDATA[Set to a utf8 byte array]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.io.Text"/>
|
|
<doc>
|
|
<![CDATA[copy a text.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the Text to a range of bytes
|
|
@param utf8 the data to copy from
|
|
@param start the first position of the new string
|
|
@param len the number of bytes of the new string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Append a range of bytes to the end of the given text
|
|
@param utf8 the data to copy from
|
|
@param start the first position to append from utf8
|
|
@param len the number of bytes to append]]>
|
|
</doc>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Clear the string to empty.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Convert text back to string
|
|
@see java.lang.Object#toString()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[deserialize]]>
|
|
</doc>
|
|
</method>
|
|
<method name="skip"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Skips over one Text in the input.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serialize this object by writing it to <code>out</code>.
|
|
The length is written using zero-compressed encoding.
|
|
@see Writable#write(DataOutput)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a Text with the same contents.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="decode" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<exception name="CharacterCodingException" type="java.nio.charset.CharacterCodingException"/>
|
|
<doc>
|
|
<![CDATA[Converts the provided byte array to a String using the
|
|
UTF-8 encoding. If the input is malformed,
|
|
it is replaced by a default value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="decode" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="CharacterCodingException" type="java.nio.charset.CharacterCodingException"/>
|
|
</method>
|
|
<method name="decode" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<param name="replace" type="boolean"/>
|
|
<exception name="CharacterCodingException" type="java.nio.charset.CharacterCodingException"/>
|
|
<doc>
|
|
<![CDATA[Converts the provided byte array to a String using the
|
|
UTF-8 encoding. If <code>replace</code> is true, then
|
|
malformed input is replaced with the
|
|
substitution character, which is U+FFFD. Otherwise the
|
|
method throws a MalformedInputException.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="encode" return="java.nio.ByteBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<exception name="CharacterCodingException" type="java.nio.charset.CharacterCodingException"/>
|
|
<doc>
|
|
<![CDATA[Converts the provided String to bytes using the
|
|
UTF-8 encoding. If the input is malformed,
|
|
invalid chars are replaced by a default value.
|
|
@return ByteBuffer: bytes are stored at ByteBuffer.array()
|
|
and length is ByteBuffer.limit()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="encode" return="java.nio.ByteBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<param name="replace" type="boolean"/>
|
|
<exception name="CharacterCodingException" type="java.nio.charset.CharacterCodingException"/>
|
|
<doc>
|
|
<![CDATA[Converts the provided String to bytes using the
|
|
UTF-8 encoding. If <code>replace</code> is true, then
|
|
malformed input is replaced with the
|
|
substitution character, which is U+FFFD. Otherwise the
|
|
method throws a MalformedInputException.
|
|
@return ByteBuffer: bytes are stored at ByteBuffer.array()
|
|
and length is ByteBuffer.limit()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a UTF8 encoded string from in]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeString" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="s" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a UTF8 encoded string to out]]>
|
|
</doc>
|
|
</method>
|
|
<method name="validateUTF8"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<exception name="MalformedInputException" type="java.nio.charset.MalformedInputException"/>
|
|
<doc>
|
|
<![CDATA[Check if a byte array contains valid utf-8
|
|
@param utf8 byte array
|
|
@throws MalformedInputException if the byte array contains invalid utf-8]]>
|
|
</doc>
|
|
</method>
|
|
<method name="validateUTF8"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf8" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="MalformedInputException" type="java.nio.charset.MalformedInputException"/>
|
|
<doc>
|
|
<![CDATA[Check to see if a byte array is valid utf-8
|
|
@param utf8 the array of bytes
|
|
@param start the offset of the first byte in the array
|
|
@param len the length of the byte sequence
|
|
@throws MalformedInputException if the byte array contains invalid bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="bytesToCodePoint" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="java.nio.ByteBuffer"/>
|
|
<doc>
|
|
<![CDATA[Returns the next code point at the current position in
|
|
the buffer. The buffer's position will be incremented.
|
|
Any mark set on this buffer will be changed by this method!]]>
|
|
</doc>
|
|
</method>
|
|
<method name="utf8Length" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[For the given string, returns the number of UTF-8 bytes
|
|
required to encode the string.
|
|
@param string text to encode
|
|
@return number of UTF-8 bytes required to encode]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class stores text using standard UTF8 encoding. It provides methods
|
|
to serialize, deserialize, and compare texts at byte level. The type of
|
|
length is integer and is serialized using zero-compressed format. <p>In
|
|
addition, it provides methods for string traversal without converting the
|
|
byte array to a string. <p>Also includes utilities for
|
|
serializing/deserializing a string, encoding/decoding a string, checking if a
|
|
byte array contains valid UTF8 code, and calculating the length of an encoded
|
|
string.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.Text -->
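<!-- Usage sketch (not part of the generated API record): byte-level operations
     on Text. The sample strings are illustrative.

import org.apache.hadoop.io.Text;

public class TextExample {
  public static void main(String[] args) throws Exception {
    Text t = new Text("hadoop");
    System.out.println(t.getLength());      // 6: number of UTF-8 bytes
    System.out.println(t.find("do"));       // 2: byte offset of the substring
    System.out.println(t.charAt(0));        // 104: code point of 'h'

    java.nio.ByteBuffer bb = Text.encode("hadoop");             // String to UTF-8
    System.out.println(Text.decode(bb.array(), 0, bb.limit())); // and back again
    System.out.println(Text.utf8Length("hadoop"));              // 6
  }
}
-->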
|
|
<!-- start class org.apache.hadoop.io.Text.Comparator -->
|
|
<class name="Text.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Text.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparator optimized for Text keys.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.Text.Comparator -->
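<!-- Usage sketch (not part of the generated API record): comparing two serialized
     Text keys directly on their bytes, which is what the optimized Text.Comparator
     is for during sorting. The buffers below are built by hand only to have raw
     bytes to compare.

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

public class RawCompareExample {
  public static void main(String[] args) throws Exception {
    DataOutputBuffer b1 = new DataOutputBuffer();
    new Text("apple").write(b1);                 // serialized form of key 1
    DataOutputBuffer b2 = new DataOutputBuffer();
    new Text("banana").write(b2);                // serialized form of key 2

    // WritableComparator.get(Text.class) returns the registered Text.Comparator.
    WritableComparator cmp = WritableComparator.get(Text.class);
    int c = cmp.compare(b1.getData(), 0, b1.getLength(),
                        b2.getData(), 0, b2.getLength());
    System.out.println(c < 0);                   // true: "apple" sorts first
  }
}
-->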
|
|
<!-- start class org.apache.hadoop.io.TwoDArrayWritable -->
|
|
<class name="TwoDArrayWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="TwoDArrayWritable" type="java.lang.Class"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="TwoDArrayWritable" type="java.lang.Class, org.apache.hadoop.io.Writable[][]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="toArray" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="values" type="org.apache.hadoop.io.Writable[][]"/>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable[][]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Writable for 2D arrays containing a matrix of instances of a class.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.TwoDArrayWritable -->
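<!-- Usage sketch (not part of the generated API record): storing a small matrix
     of IntWritable values in a TwoDArrayWritable. The 2x2 contents are illustrative.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.TwoDArrayWritable;
import org.apache.hadoop.io.Writable;

public class TwoDArrayExample {
  public static void main(String[] args) {
    Writable[][] matrix = {
      { new IntWritable(1), new IntWritable(2) },
      { new IntWritable(3), new IntWritable(4) }
    };
    // The element class must be given so readFields can re-create instances.
    TwoDArrayWritable grid = new TwoDArrayWritable(IntWritable.class, matrix);
    Writable[][] back = grid.get();
    System.out.println(((IntWritable) back[1][0]).get());  // 3
  }
}
-->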
|
|
<!-- start class org.apache.hadoop.io.UTF8 -->
|
|
<class name="UTF8" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="replaced by Text">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="UTF8"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="UTF8" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct from a given string.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="UTF8" type="org.apache.hadoop.io.UTF8"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct by copying another UTF8.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getBytes" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The raw bytes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of bytes in the encoded string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set to contain the contents of a string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.io.UTF8"/>
|
|
<doc>
|
|
<![CDATA[Set to contain the contents of a string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="skip"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Skips over one UTF8 in the input.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compare two UTF8s.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Convert to a String.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a UTF8 with the same contents.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getBytes" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Convert a string to a UTF-8 encoded byte array.
|
|
@see String#getBytes(String)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a UTF-8 encoded string.
|
|
|
|
@see DataInput#readUTF()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeString" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="s" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a UTF-8 encoded string.
|
|
|
|
@see DataOutput#writeUTF(String)]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for strings that uses the UTF8 encoding.
|
|
|
|
<p>Also includes utilities for efficiently reading and writing UTF-8.
|
|
|
|
@deprecated replaced by Text]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.UTF8 -->
|
|
<!-- start class org.apache.hadoop.io.UTF8.Comparator -->
|
|
<class name="UTF8.Comparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="UTF8.Comparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparator optimized for UTF8 keys.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.UTF8.Comparator -->
|
|
<!-- start class org.apache.hadoop.io.VersionedWritable -->
|
|
<class name="VersionedWritable" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="VersionedWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getVersion" return="byte"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the version number of the current implementation.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A base class for Writables that provides version checking.
|
|
|
|
<p>This is useful when a class may evolve, so that instances written by the
|
|
old version of the class may still be processed by the new version. To
|
|
handle this situation, {@link #readFields(DataInput)}
|
|
implementations should catch {@link VersionMismatchException}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.VersionedWritable -->
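<!-- Implementation sketch (not part of the generated API record): a Writable that
     gets version checking by extending VersionedWritable. The field and version
     number are illustrative; write() and readFields() must call the superclass
     methods so the version byte is written and verified.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.VersionedWritable;

public class TemperatureRecord extends VersionedWritable {
  private static final byte VERSION = 1;
  private int celsius;

  public byte getVersion() {
    return VERSION;                  // compared against the stored version on read
  }
  public void write(DataOutput out) throws IOException {
    super.write(out);                // writes the version byte first
    out.writeInt(celsius);
  }
  public void readFields(DataInput in) throws IOException {
    super.readFields(in);            // throws VersionMismatchException on mismatch
    celsius = in.readInt();
  }
}
-->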
|
|
<!-- start class org.apache.hadoop.io.VersionMismatchException -->
|
|
<class name="VersionMismatchException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="VersionMismatchException" type="byte, byte"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a string representation of this object.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Thrown by {@link VersionedWritable#readFields(DataInput)} when the
|
|
version of an object being read does not match the current implementation
|
|
version as returned by {@link VersionedWritable#getVersion()}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.VersionMismatchException -->
|
|
<!-- start class org.apache.hadoop.io.VIntWritable -->
|
|
<class name="VIntWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="VIntWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="VIntWritable" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the value of this VIntWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the value of this VIntWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a VIntWritable with the same value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compares two VIntWritables.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for integer values stored in variable-length format.
|
|
Such values take between one and five bytes. Smaller values take fewer bytes.
|
|
|
|
@see org.apache.hadoop.io.WritableUtils#readVInt(DataInput)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.VIntWritable -->
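<!-- Usage sketch for VIntWritable: a write/readFields round trip through java.io streams.
     The byte counts in the comments follow the variable-length encoding described above
     (one byte for small values, a length byte plus data bytes otherwise).

  import java.io.ByteArrayInputStream;
  import java.io.ByteArrayOutputStream;
  import java.io.DataInputStream;
  import java.io.DataOutputStream;
  import java.io.IOException;
  import org.apache.hadoop.io.VIntWritable;

  public class VIntWritableDemo {
    public static void main(String[] args) throws IOException {
      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      DataOutputStream out = new DataOutputStream(bytes);

      new VIntWritable(42).write(out);        // small value: a single byte on the wire
      new VIntWritable(1000000).write(out);   // larger value: a length byte plus three data bytes
      out.flush();

      DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
      VIntWritable v = new VIntWritable();    // one instance can be re-used for every read
      v.readFields(in);
      System.out.println(v.get());            // 42
      v.readFields(in);
      System.out.println(v.get());            // 1000000
    }
  }
-->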
|
|
<!-- start class org.apache.hadoop.io.VLongWritable -->
|
|
<class name="VLongWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<constructor name="VLongWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="VLongWritable" type="long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the value of this VLongWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the value of this VLongWritable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Returns true iff <code>o</code> is a VLongWritable with the same value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Compares two VLongWritables.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparable for longs in a variable-length format. Such values take
|
|
between one and nine bytes. Smaller values take fewer bytes.
|
|
|
|
@see org.apache.hadoop.io.WritableUtils#readVLong(DataInput)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.VLongWritable -->
|
|
<!-- start interface org.apache.hadoop.io.Writable -->
|
|
<interface name="Writable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serialize the fields of this object to <code>out</code>.
|
|
|
|
@param out <code>DataOutput</code> to serialize this object into.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Deserialize the fields of this object from <code>in</code>.
|
|
|
|
<p>For efficiency, implementations should attempt to re-use storage in the
|
|
existing object where possible.</p>
|
|
|
|
@param in <code>DataInput</code> to deserialize this object from.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A serializable object which implements a simple, efficient, serialization
|
|
protocol, based on {@link DataInput} and {@link DataOutput}.
|
|
|
|
<p>Any <code>key</code> or <code>value</code> type in the Hadoop Map-Reduce
|
|
framework implements this interface.</p>
|
|
|
|
<p>Implementations typically implement a static <code>read(DataInput)</code>
|
|
method which constructs a new instance, calls {@link #readFields(DataInput)}
|
|
and returns the instance.</p>
|
|
|
|
<p>Example:</p>
|
|
<p><blockquote><pre>
|
|
public class MyWritable implements Writable {
|
|
// Some data
|
|
private int counter;
|
|
private long timestamp;
|
|
|
|
public void write(DataOutput out) throws IOException {
|
|
out.writeInt(counter);
|
|
out.writeLong(timestamp);
|
|
}
|
|
|
|
public void readFields(DataInput in) throws IOException {
|
|
counter = in.readInt();
|
|
timestamp = in.readLong();
|
|
}
|
|
|
|
public static MyWritable read(DataInput in) throws IOException {
|
|
MyWritable w = new MyWritable();
|
|
w.readFields(in);
|
|
return w;
|
|
}
|
|
}
|
|
</pre></blockquote></p>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.Writable -->
|
|
<!-- start interface org.apache.hadoop.io.WritableComparable -->
|
|
<interface name="WritableComparable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="java.lang.Comparable<T>"/>
|
|
<doc>
|
|
<![CDATA[A {@link Writable} which is also {@link Comparable}.
|
|
|
|
<p><code>WritableComparable</code>s can be compared to each other, typically
|
|
via <code>Comparator</code>s. Any type which is to be used as a
|
|
<code>key</code> in the Hadoop Map-Reduce framework should implement this
|
|
interface.</p>
|
|
|
|
<p>Example:</p>
|
|
<p><blockquote><pre>
|
|
public class MyWritableComparable implements WritableComparable {
|
|
// Some data
|
|
private int counter;
|
|
private long timestamp;
|
|
|
|
public void write(DataOutput out) throws IOException {
|
|
out.writeInt(counter);
|
|
out.writeLong(timestamp);
|
|
}
|
|
|
|
public void readFields(DataInput in) throws IOException {
|
|
counter = in.readInt();
|
|
timestamp = in.readLong();
|
|
}
|
|
|
|
public int compareTo(MyWritableComparable w) {
|
|
int thisValue = this.counter;
|
|
int thatValue = w.counter;
|
|
return (thisValue < thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
|
|
}
|
|
}
|
|
</pre></blockquote></p>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.WritableComparable -->
|
|
<!-- start class org.apache.hadoop.io.WritableComparator -->
|
|
<class name="WritableComparator" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.RawComparator"/>
|
|
<constructor name="WritableComparator" type="java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct for a {@link WritableComparable} implementation.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="WritableComparator" type="java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>, boolean"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="get" return="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>"/>
|
|
<doc>
|
|
<![CDATA[Get a comparator for a {@link WritableComparable} implementation.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="define"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class"/>
|
|
<param name="comparator" type="org.apache.hadoop.io.WritableComparator"/>
|
|
<doc>
|
|
<![CDATA[Register an optimized comparator for a {@link WritableComparable}
|
|
implementation.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeyClass" return="java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the WritableComparable implementation class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="newKey" return="org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a new {@link WritableComparable} instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
<doc>
|
|
<![CDATA[Optimization hook. Override this to make sorting with SequenceFile.Sorter faster.
|
|
|
|
<p>The default implementation reads the data into two {@link
|
|
WritableComparable}s (using {@link
|
|
Writable#readFields(DataInput)}), then calls {@link
|
|
#compare(WritableComparable,WritableComparable)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="a" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="b" type="org.apache.hadoop.io.WritableComparable"/>
|
|
<doc>
|
|
<![CDATA[Compare two WritableComparables.
|
|
|
|
<p> The default implementation uses the natural ordering, calling {@link
|
|
Comparable#compareTo(Object)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="a" type="java.lang.Object"/>
|
|
<param name="b" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="compareBytes" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
<doc>
|
|
<![CDATA[Lexicographic order of binary data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashBytes" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Compute hash for binary data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readUnsignedShort" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse an unsigned short from a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse an integer from a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFloat" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse a float from a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse a long from a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readDouble" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse a double from a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded long from a byte array and returns it.
|
|
@param bytes byte array containing the encoded long
|
|
@param start starting index
|
|
@throws java.io.IOException
|
|
@return deserialized long]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded integer from a byte array and returns it.
|
|
@param bytes byte array with the encoded integer
|
|
@param start start index
|
|
@throws java.io.IOException
|
|
@return deserialized integer]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Comparator for {@link WritableComparable}s.
|
|
|
|
<p>This base implementation uses the natural ordering. To define alternate
|
|
orderings, override {@link #compare(WritableComparable,WritableComparable)}.
|
|
|
|
<p>One may optimize compare-intensive operations by overriding
|
|
{@link #compare(byte[],int,int,byte[],int,int)}. Static utility methods are
|
|
provided to assist in optimized implementations of this method.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.WritableComparator -->
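<!-- Usage sketch: an optimized raw comparator that compares serialized IntWritable keys
     without deserializing them, using the static readInt helper described above.
     RawIntComparator is an illustrative name; registration via define() is optional.

  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.WritableComparator;

  public class RawIntComparator extends WritableComparator {
    public RawIntComparator() {
      super(IntWritable.class);
    }

    // Each key is a single 4-byte big-endian int, so compare the bytes directly.
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      int thisValue = readInt(b1, s1);
      int thatValue = readInt(b2, s2);
      return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
    }
  }

  // Registration, typically in a static initializer:
  //   WritableComparator.define(IntWritable.class, new RawIntComparator());
-->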
|
|
<!-- start class org.apache.hadoop.io.WritableFactories -->
|
|
<class name="WritableFactories" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="setFactory"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class"/>
|
|
<param name="factory" type="org.apache.hadoop.io.WritableFactory"/>
|
|
<doc>
|
|
<![CDATA[Define a factory for a class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFactory" return="org.apache.hadoop.io.WritableFactory"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class"/>
|
|
<doc>
|
|
<![CDATA[Define a factory for a class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="newInstance" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Create a new instance of a class with a defined factory.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="newInstance" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>"/>
|
|
<doc>
|
|
<![CDATA[Create a new instance of a class with a defined factory.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Factories for non-public writables. Defining a factory permits {@link
|
|
ObjectWritable} to be able to construct instances of non-public classes.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.WritableFactories -->
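<!-- Usage sketch: registering a WritableFactory so that a non-public Writable can be
     instantiated reflectively (for example by ObjectWritable). PrivateRecord is an
     illustrative, package-private class, not part of the API description above.

  import java.io.DataInput;
  import java.io.DataOutput;
  import java.io.IOException;
  import org.apache.hadoop.io.Writable;
  import org.apache.hadoop.io.WritableFactories;
  import org.apache.hadoop.io.WritableFactory;

  class PrivateRecord implements Writable {
    private int value;

    static {
      // Register a factory once, when the class is first loaded.
      WritableFactories.setFactory(PrivateRecord.class, new WritableFactory() {
        public Writable newInstance() { return new PrivateRecord(); }
      });
    }

    public void write(DataOutput out) throws IOException { out.writeInt(value); }
    public void readFields(DataInput in) throws IOException { value = in.readInt(); }
  }
-->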
|
|
<!-- start interface org.apache.hadoop.io.WritableFactory -->
|
|
<interface name="WritableFactory" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="newInstance" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return a new instance.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A factory for a class of Writable.
|
|
@see WritableFactories]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.WritableFactory -->
|
|
<!-- start class org.apache.hadoop.io.WritableName -->
|
|
<class name="WritableName" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="setName"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="writableClass" type="java.lang.Class"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the name that a class should be known as to something other than the
|
|
class name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addName"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="writableClass" type="java.lang.Class"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Add an alternate name for a class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="writableClass" type="java.lang.Class"/>
|
|
<doc>
|
|
<![CDATA[Return the name for a class. Default is {@link Class#getName()}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the class for a name. Default is {@link Class#forName(String)}.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Utility to permit renaming of Writable implementation classes without
|
|
invalidating files that contain their class name.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.WritableName -->
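<!-- Usage sketch: keeping old files readable after a Writable class is renamed. The old
     name "org.example.OldRecord" is illustrative; IntWritable stands in for the renamed class.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.WritableName;

  public class WritableNameDemo {
    public static void main(String[] args) throws Exception {
      // Files written before the rename recorded the old class name;
      // addName lets that old name still resolve to the current class.
      WritableName.addName(IntWritable.class, "org.example.OldRecord");

      Class<?> c = WritableName.getClass("org.example.OldRecord", new Configuration());
      System.out.println(c.getName());   // org.apache.hadoop.io.IntWritable
    }
  }
-->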
|
|
<!-- start class org.apache.hadoop.io.WritableUtils -->
|
|
<class name="WritableUtils" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="WritableUtils"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="readCompressedByteArray" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="skipCompressedByteArray"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeCompressedByteArray" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="bytes" type="byte[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readCompressedString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeCompressedString" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="s" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeString"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="s" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeStringArray"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="s" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeCompressedStringArray"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="s" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readStringArray" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readCompressedStringArray" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="displayByteArray"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="record" type="byte[]"/>
|
|
</method>
|
|
<method name="clone" return="T extends org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="orig" type="T extends org.apache.hadoop.io.Writable"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Make a copy of a writable object using serialization to a buffer.
|
|
@param orig The object to copy
|
|
@return The copied object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="cloneInto"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dst" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="src" type="org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Make a copy of the writable object using serialization to a buffer
|
|
@param dst the object to copy into, whose previous contents are destroyed
|
|
@param src the object to copy from
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeVInt"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.DataOutput"/>
|
|
<param name="i" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serializes an integer to a binary stream with zero-compressed encoding.
|
|
For -112 <= i <= 127, only one byte is used with the actual value.
|
|
For other values of i, the first byte value indicates whether the
|
|
integer is positive or negative, and the number of bytes that follow.
|
|
If the first byte value v is between -113 and -116, the following integer
|
|
is positive, with the number of bytes that follow being -(v+112).
|
|
If the first byte value v is between -121 and -124, the following integer
|
|
is negative, with the number of bytes that follow being -(v+120). Bytes are
|
|
stored in the high-non-zero-byte-first order.
|
|
|
|
@param stream Binary output stream
|
|
@param i Integer to be serialized
|
|
@throws java.io.IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeVLong"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.DataOutput"/>
|
|
<param name="i" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serializes a long to a binary stream with zero-compressed encoding.
|
|
For -112 <= i <= 127, only one byte is used with the actual value.
|
|
For other values of i, the first byte value indicates whether the
|
|
long is positive or negative, and the number of bytes that follow.
|
|
If the first byte value v is between -113 and -120, the following long
|
|
is positive, with the number of bytes that follow being -(v+112).
|
|
If the first byte value v is between -121 and -128, the following long
|
|
is negative, with the number of bytes that follow being -(v+120). Bytes are
|
|
stored in the high-non-zero-byte-first order.
|
|
|
|
@param stream Binary output stream
|
|
@param i Long to be serialized
|
|
@throws java.io.IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded long from input stream and returns it.
|
|
@param stream Binary input stream
|
|
@throws java.io.IOException
|
|
@return deserialized long from stream.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded integer from input stream and returns it.
|
|
@param stream Binary input stream
|
|
@throws java.io.IOException
|
|
@return deserialized integer from stream.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isNegativeVInt" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Given the first byte of a vint/vlong, determine the sign
|
|
@param value the first byte
|
|
@return true if the encoded value is negative]]>
|
|
</doc>
|
|
</method>
|
|
<method name="decodeVIntSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Parse the first byte of a vint/vlong to determine the number of bytes
|
|
@param value the first byte of the vint/vlong
|
|
@return the total number of bytes (1 to 9)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getVIntSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="long"/>
|
|
<doc>
|
|
<![CDATA[Get the encoded length when a value is stored in the variable-length format
|
|
@return the encoded length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readEnum" return="T extends java.lang.Enum<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<param name="enumType" type="java.lang.Class<T>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read an Enum value from DataInput. Enums are read and written
|
|
using String values.
|
|
@param <T> Enum type
|
|
@param in DataInput to read from
|
|
@param enumType Class type of Enum
|
|
@return Enum represented by String read from DataInput
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeEnum"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<param name="enumVal" type="java.lang.Enum<?>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes the String value of an enum to DataOutput.
|
|
@param out DataOutput stream
|
|
@param enumVal enum value
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="skipFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Skip <i>len</i> bytes in the input stream <i>in</i>
|
|
@param in input stream
|
|
@param len number of bytes to skip
|
|
@throws IOException if fewer than <i>len</i> bytes can be skipped]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toByteArray" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="writables" type="org.apache.hadoop.io.Writable[]"/>
|
|
<doc>
|
|
<![CDATA[Convert writables to a byte array]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.WritableUtils -->
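<!-- Usage sketch for the zero-compressed vint/vlong helpers; the encoded sizes noted in
     the comments follow the encoding described in writeVLong above. Values are illustrative.

  import java.io.ByteArrayInputStream;
  import java.io.ByteArrayOutputStream;
  import java.io.DataInputStream;
  import java.io.DataOutputStream;
  import java.io.IOException;
  import org.apache.hadoop.io.WritableUtils;

  public class VLongDemo {
    public static void main(String[] args) throws IOException {
      System.out.println(WritableUtils.getVIntSize(100L));       // 1 byte
      System.out.println(WritableUtils.getVIntSize(1000000L));   // 4 bytes: length byte plus 3 data bytes

      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      DataOutputStream out = new DataOutputStream(bytes);
      WritableUtils.writeVLong(out, 100L);
      WritableUtils.writeVLong(out, 1000000L);
      out.flush();

      DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
      System.out.println(WritableUtils.readVLong(in));            // 100
      System.out.println(WritableUtils.readVLong(in));            // 1000000
    }
  }
-->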
|
|
</package>
|
|
<package name="org.apache.hadoop.io.compress">
|
|
<!-- start class org.apache.hadoop.io.compress.BZip2Codec -->
|
|
<class name="BZip2Codec" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<constructor name="BZip2Codec"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of BZip2Codec]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Creates CompressionOutputStream for BZip2
|
|
|
|
@param out
|
|
The output Stream
|
|
@return The BZip2 CompressionOutputStream
|
|
@throws java.io.IOException
|
|
Throws IO exception]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="compressor" type="org.apache.hadoop.io.compress.Compressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This functionality is currently not supported.
|
|
|
|
@throws java.lang.UnsupportedOperationException
|
|
Throws UnsupportedOperationException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Compressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This functionality is currently not supported.
|
|
|
|
@throws java.lang.UnsupportedOperationException
|
|
Throws UnsupportedOperationException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createCompressor" return="org.apache.hadoop.io.compress.Compressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This functionality is currently not supported.
|
|
|
|
@throws java.lang.UnsupportedOperationException
|
|
Throws UnsupportedOperationException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Creates a CompressionInputStream from which uncompressed data can be read.
|
|
|
|
@param in
|
|
The InputStream
|
|
@return Returns CompressionInputStream for BZip2
|
|
@throws java.io.IOException
|
|
Throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="decompressor" type="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This functionality is currently not supported.
|
|
|
|
@throws java.lang.UnsupportedOperationException
|
|
Throws UnsupportedOperationException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDecompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Decompressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This functionality is currently not supported.
|
|
|
|
@throws java.lang.UnsupportedOperationException
|
|
Throws UnsupportedOperationException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This functionality is currently not supported.
|
|
|
|
@throws java.lang.UnsupportedOperationException
|
|
Throws UnsupportedOperationException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultExtension" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[.bz2 is recognized as the default extension for compressed BZip2 files
|
|
|
|
@return A String telling the default bzip2 file extension]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class provides CompressionOutputStream and CompressionInputStream for
|
|
compression and decompression. Currently we don't have an implementation of
|
|
the Compressor and Decompressor interfaces, so those methods of
|
|
CompressionCodec which have a Compressor or Decompressor type argument, throw
|
|
UnsupportedOperationException.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.BZip2Codec -->
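<!-- Usage sketch: compressing and decompressing in memory with BZip2Codec, using only the
     stream-returning methods (the Compressor/Decompressor variants above are unsupported).
     The sample text is illustrative.

  import java.io.BufferedReader;
  import java.io.ByteArrayInputStream;
  import java.io.ByteArrayOutputStream;
  import java.io.IOException;
  import java.io.InputStreamReader;
  import org.apache.hadoop.io.compress.BZip2Codec;
  import org.apache.hadoop.io.compress.CompressionInputStream;
  import org.apache.hadoop.io.compress.CompressionOutputStream;

  public class BZip2Demo {
    public static void main(String[] args) throws IOException {
      BZip2Codec codec = new BZip2Codec();

      ByteArrayOutputStream compressed = new ByteArrayOutputStream();
      CompressionOutputStream out = codec.createOutputStream(compressed);
      out.write("hello, bzip2".getBytes("UTF-8"));
      out.close();                                   // finishes and closes the compressed stream

      CompressionInputStream in =
          codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()));
      BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
      System.out.println(reader.readLine());         // hello, bzip2
    }
  }
-->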
|
|
<!-- start class org.apache.hadoop.io.compress.CodecPool -->
|
|
<class name="CodecPool" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="CodecPool"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getCompressor" return="org.apache.hadoop.io.compress.Compressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<doc>
|
|
<![CDATA[Get a {@link Compressor} for the given {@link CompressionCodec} from the
|
|
pool or a new one.
|
|
|
|
@param codec the <code>CompressionCodec</code> for which to get the
|
|
<code>Compressor</code>
|
|
@return <code>Compressor</code> for the given
|
|
<code>CompressionCodec</code> from the pool or a new one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="codec" type="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<doc>
|
|
<![CDATA[Get a {@link Decompressor} for the given {@link CompressionCodec} from the
|
|
pool or a new one.
|
|
|
|
@param codec the <code>CompressionCodec</code> for which to get the
|
|
<code>Decompressor</code>
|
|
@return <code>Decompressor</code> for the given
|
|
<code>CompressionCodec</code> from the pool or a new one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="returnCompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="compressor" type="org.apache.hadoop.io.compress.Compressor"/>
|
|
<doc>
|
|
<![CDATA[Return the {@link Compressor} to the pool.
|
|
|
|
@param compressor the <code>Compressor</code> to be returned to the pool]]>
|
|
</doc>
|
|
</method>
|
|
<method name="returnDecompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="decompressor" type="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<doc>
|
|
<![CDATA[Return the {@link Decompressor} to the pool.
|
|
|
|
@param decompressor the <code>Decompressor</code> to be returned to the
|
|
pool]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A global compressor/decompressor pool used to save and reuse
|
|
(possibly native) compression/decompression codecs.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.CodecPool -->
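<!-- Usage sketch: borrowing a Compressor from the pool and returning it in a finally block.
     DefaultCodec and the in-memory output buffer are illustrative choices.

  import java.io.ByteArrayOutputStream;
  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.compress.CodecPool;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.CompressionOutputStream;
  import org.apache.hadoop.io.compress.Compressor;
  import org.apache.hadoop.io.compress.DefaultCodec;
  import org.apache.hadoop.util.ReflectionUtils;

  public class CodecPoolDemo {
    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      CompressionCodec codec =
          (CompressionCodec) ReflectionUtils.newInstance(DefaultCodec.class, conf);

      Compressor compressor = CodecPool.getCompressor(codec);
      try {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        CompressionOutputStream out = codec.createOutputStream(buffer, compressor);
        out.write("pooled compression".getBytes("UTF-8"));
        out.close();
        System.out.println(buffer.size() + " compressed bytes");
      } finally {
        CodecPool.returnCompressor(compressor);     // always hand the compressor back
      }
    }
  }
-->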
|
|
<!-- start interface org.apache.hadoop.io.compress.CompressionCodec -->
|
|
<interface name="CompressionCodec" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a {@link CompressionOutputStream} that will write to the given
|
|
{@link OutputStream}.
|
|
|
|
@param out the location for the final output stream
|
|
@return a stream to which the user can write uncompressed data to have it compressed
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="compressor" type="org.apache.hadoop.io.compress.Compressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a {@link CompressionOutputStream} that will write to the given
|
|
{@link OutputStream} with the given {@link Compressor}.
|
|
|
|
@param out the location for the final output stream
|
|
@param compressor compressor to use
|
|
@return a stream to which the user can write uncompressed data to have it compressed
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Compressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the type of {@link Compressor} needed by this {@link CompressionCodec}.
|
|
|
|
@return the type of compressor needed by this codec.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createCompressor" return="org.apache.hadoop.io.compress.Compressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new {@link Compressor} for use by this {@link CompressionCodec}.
|
|
|
|
@return a new compressor for use by this codec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a stream decompressor that will read from the given input stream.
|
|
|
|
@param in the stream to read compressed bytes from
|
|
@return a stream to read uncompressed bytes from
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="decompressor" type="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a {@link CompressionInputStream} that will read from the given
|
|
{@link InputStream} with the given {@link Decompressor}.
|
|
|
|
@param in the stream to read compressed bytes from
|
|
@param decompressor decompressor to use
|
|
@return a stream to read uncompressed bytes from
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDecompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Decompressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
|
|
|
|
@return the type of decompressor needed by this codec.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new {@link Decompressor} for use by this {@link CompressionCodec}.
|
|
|
|
@return a new decompressor for use by this codec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultExtension" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the default filename extension for this kind of compression.
|
|
@return the extension including the '.']]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class encapsulates a streaming compression/decompression pair.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.compress.CompressionCodec -->
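<!-- Usage sketch: using a CompressionCodec generically through the interface above.
     GzipCodec, the output path and the sample data are illustrative choices.

  import java.io.FileOutputStream;
  import java.io.IOException;
  import java.io.OutputStream;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.CompressionOutputStream;
  import org.apache.hadoop.io.compress.GzipCodec;
  import org.apache.hadoop.util.ReflectionUtils;

  public class CodecWriteDemo {
    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      CompressionCodec codec =
          (CompressionCodec) ReflectionUtils.newInstance(GzipCodec.class, conf);

      // getDefaultExtension() includes the leading '.', e.g. ".gz" for gzip
      String path = "part-00000" + codec.getDefaultExtension();
      OutputStream raw = new FileOutputStream(path);
      CompressionOutputStream out = codec.createOutputStream(raw);
      out.write("some data".getBytes("UTF-8"));
      out.finish();   // flush the remaining compressed data without closing the file
      out.close();
    }
  }
-->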
|
|
<!-- start class org.apache.hadoop.io.compress.CompressionCodecFactory -->
|
|
<class name="CompressionCodecFactory" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="CompressionCodecFactory" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Find the codecs specified in the config value io.compression.codecs
|
|
and register them. Defaults to gzip and zip.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Print the extension map out as a string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCodecClasses" return="java.util.List<java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the list of codecs listed in the configuration
|
|
@param conf the configuration to look in
|
|
@return a list of the CompressionCodec classes or null if the attribute
|
|
was not set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCodecClasses"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="classes" type="java.util.List<java.lang.Class>"/>
|
|
<doc>
|
|
<![CDATA[Sets a list of codec classes in the configuration.
|
|
@param conf the configuration to modify
|
|
@param classes the list of classes to set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCodec" return="org.apache.hadoop.io.compress.CompressionCodec"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Find the relevant compression codec for the given file based on its
|
|
filename suffix.
|
|
@param file the filename to check
|
|
@return the codec object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="removeSuffix" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="filename" type="java.lang.String"/>
|
|
<param name="suffix" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Removes a suffix from a filename, if it has it.
|
|
@param filename the filename to strip
|
|
@param suffix the suffix to remove
|
|
@return the shortened filename]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[A little test program.
|
|
@param args]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A factory that will find the correct codec for a given filename.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.CompressionCodecFactory -->
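<!-- Usage sketch: resolving a codec from a file name suffix. The path is illustrative;
     getCodec returns null when no registered codec matches the suffix.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.CompressionCodecFactory;

  public class CodecLookupDemo {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      CompressionCodecFactory factory = new CompressionCodecFactory(conf);

      Path file = new Path("/data/part-00000.gz");
      CompressionCodec codec = factory.getCodec(file);
      if (codec == null) {
        System.out.println("no codec registered for " + file);
      } else {
        // e.g. wrap a raw input stream:  codec.createInputStream(rawIn)
        System.out.println("using " + codec.getClass().getName());
      }
    }
  }
-->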
|
|
<!-- start class org.apache.hadoop.io.compress.CompressionInputStream -->
|
|
<class name="CompressionInputStream" extends="java.io.InputStream"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="CompressionInputStream" type="java.io.InputStream"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a compression input stream that reads
|
|
the decompressed bytes from the given stream.
|
|
|
|
@param in The input stream to read the compressed bytes from.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read bytes from the stream.
|
|
Made abstract to prevent leakage to underlying stream.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="resetState"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reset the decompressor to its initial state and discard any buffered data,
|
|
as the underlying stream may have been repositioned.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="in" type="java.io.InputStream"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The input stream containing the compressed data.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A compression input stream.
|
|
|
|
<p>Implementations are assumed to be buffered. This permits clients to
|
|
reposition the underlying input stream then call {@link #resetState()},
|
|
without having to also synchronize client buffers.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.CompressionInputStream -->
|
|
<!-- start class org.apache.hadoop.io.compress.CompressionOutputStream -->
|
|
<class name="CompressionOutputStream" extends="java.io.OutputStream"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="CompressionOutputStream" type="java.io.OutputStream"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a compression output stream that writes
|
|
the compressed bytes to the given stream.
|
|
@param out]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="flush"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write compressed bytes to the stream.
|
|
Made abstract to prevent leakage to underlying stream.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="finish"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Finishes writing compressed data to the output stream
|
|
without closing the underlying stream.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="resetState"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reset the compressor to its initial state.
|
|
Does not reset the underlying stream.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="out" type="java.io.OutputStream"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The output stream that receives the compressed data.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A compression output stream.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.CompressionOutputStream -->
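<!-- Illustrative sketch of the finish()/close() distinction described above: finish()
     completes the compressed output while the underlying stream stays open. The codec
     choice (DefaultCodec) and the payload string are assumptions for the example.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

public class CompressionOutputStreamExample {
  public static void main(String[] args) throws IOException {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());
    ByteArrayOutputStream underlying = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(underlying);
    byte[] record = "hello, compression".getBytes("UTF-8");
    out.write(record, 0, record.length);
    out.finish();   // complete the compressed data; 'underlying' stays open
    System.out.println("compressed size: " + underlying.size() + " bytes");
    out.close();    // now close both streams
  }
}
-->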
|
|
<!-- start interface org.apache.hadoop.io.compress.Compressor -->
|
|
<interface name="Compressor" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets input data for compression.
|
|
This should be called whenever #needsInput() returns
|
|
<code>true</code> indicating that more input data is required.
|
|
|
|
@param b Input data
|
|
@param off Start offset
|
|
@param len Length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if the input data buffer is empty and
|
|
#setInput() should be called to provide more input.
|
|
|
|
@return <code>true</code> if the input data buffer is empty and
|
|
#setInput() should be called in order to provide more input.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets preset dictionary for compression. A preset dictionary
|
|
is used when the history buffer can be predetermined.
|
|
|
|
@param b Dictionary data bytes
|
|
@param off Start offset
|
|
@param len Length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesRead" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return number of uncompressed bytes input so far.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesWritten" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return number of compressed bytes output so far.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="finish"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[When called, indicates that compression should end
|
|
with the current contents of the input buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if the end of the compressed
|
|
data output stream has been reached.
|
|
@return <code>true</code> if the end of the compressed
|
|
data output stream has been reached.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compress" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Fills specified buffer with compressed data. Returns actual number
|
|
of bytes of compressed data. A return value of 0 indicates that
|
|
needsInput() should be called in order to determine if more input
|
|
data is required.
|
|
|
|
@param b Buffer for the compressed data
|
|
@param off Start offset of the data
|
|
@param len Size of the buffer
|
|
@return The actual number of bytes of compressed data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resets compressor so that a new set of input data can be processed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Closes the compressor and discards any unprocessed input.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Specification of a stream-based 'compressor' which can be
|
|
plugged into a {@link CompressionOutputStream} to compress data.
|
|
This is modelled after {@link java.util.zip.Deflater}.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.compress.Compressor -->
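<!-- Illustrative sketch of the Deflater-style loop this interface implies: feed the
     input, signal finish(), then drain compress() until finished(). The Compressor is
     obtained from DefaultCodec; the input string is an assumption for the example.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.DefaultCodec;

public class CompressorLoopExample {
  public static void main(String[] args) throws IOException {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());
    Compressor compressor = codec.createCompressor();

    byte[] input = "some bytes to squeeze".getBytes("UTF-8");
    compressor.setInput(input, 0, input.length);
    compressor.finish();                         // no more input will follow

    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    byte[] buf = new byte[4096];
    while (!compressor.finished()) {
      int n = compressor.compress(buf, 0, buf.length);
      compressed.write(buf, 0, n);
    }
    System.out.println(compressor.getBytesRead() + " bytes in, "
        + compressor.getBytesWritten() + " bytes out");
    compressor.end();                            // release any native resources
  }
}
-->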
|
|
<!-- start interface org.apache.hadoop.io.compress.Decompressor -->
|
|
<interface name="Decompressor" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets input data for decompression.
|
|
This should be called whenever #needsInput() returns
|
|
<code>true</code> indicating that more input data is required.
|
|
|
|
@param b Input data
|
|
@param off Start offset
|
|
@param len Length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if the input data buffer is empty and
|
|
#setInput() should be called to provide more input.
|
|
|
|
@return <code>true</code> if the input data buffer is empty and
|
|
#setInput() should be called in order to provide more input.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets preset dictionary for decompression. A preset dictionary
|
|
is used when the history buffer can be predetermined.
|
|
|
|
@param b Dictionary data bytes
|
|
@param off Start offset
|
|
@param len Length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="needsDictionary" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns <code>true</code> if a preset dictionary is needed for decompression.
|
|
@return <code>true</code> if a preset dictionary is needed for decompression]]>
|
|
</doc>
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if the end of the compressed
|
|
data output stream has been reached.
|
|
@return <code>true</code> if the end of the compressed
|
|
data output stream has been reached.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="decompress" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Fills specified buffer with uncompressed data. Returns actual number
|
|
of bytes of uncompressed data. A return value of 0 indicates that
|
|
#needsInput() should be called in order to determine if more input
|
|
data is required.
|
|
|
|
@param b Buffer for the uncompressed data
|
|
@param off Start offset of the data
|
|
@param len Size of the buffer
|
|
@return The actual number of bytes of uncompressed data.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resets decompressor so that a new set of input data can be processed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Closes the decompressor and discards any unprocessed input.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Specification of a stream-based 'de-compressor' which can be
|
|
plugged into a {@link CompressionInputStream} to decompress data.
|
|
This is modelled after {@link java.util.zip.Inflater}.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.compress.Decompressor -->
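<!-- Illustrative sketch of the Inflater-style loop this interface implies, assuming the
     whole compressed stream is available in memory. The payload is produced here with
     java.util.zip.Deflater (zlib format, which matches DefaultCodec's decompressor);
     the input string is an assumption for the example.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.Deflater;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DefaultCodec;

public class DecompressorLoopExample {
  public static void main(String[] args) throws IOException {
    // Produce a small zlib payload to decode.
    byte[] original = "round trip me".getBytes("UTF-8");
    Deflater deflater = new Deflater();
    deflater.setInput(original);
    deflater.finish();
    byte[] tmp = new byte[4096];
    int clen = deflater.deflate(tmp);
    deflater.end();

    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());
    Decompressor decompressor = codec.createDecompressor();
    decompressor.setInput(tmp, 0, clen);         // entire stream supplied at once

    ByteArrayOutputStream restored = new ByteArrayOutputStream();
    byte[] buf = new byte[4096];
    while (!decompressor.finished()) {
      int n = decompressor.decompress(buf, 0, buf.length);
      restored.write(buf, 0, n);
    }
    decompressor.end();
    System.out.println(restored.toString("UTF-8"));
  }
}
-->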
|
|
<!-- start class org.apache.hadoop.io.compress.DefaultCodec -->
|
|
<class name="DefaultCodec" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<implements name="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<constructor name="DefaultCodec"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="compressor" type="org.apache.hadoop.io.compress.Compressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getCompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Compressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createCompressor" return="org.apache.hadoop.io.compress.Compressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="decompressor" type="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getDecompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Decompressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getDefaultExtension" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.DefaultCodec -->
|
|
<!-- start class org.apache.hadoop.io.compress.GzipCodec -->
|
|
<class name="GzipCodec" extends="org.apache.hadoop.io.compress.DefaultCodec"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="GzipCodec"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="compressor" type="org.apache.hadoop.io.compress.Compressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createCompressor" return="org.apache.hadoop.io.compress.Compressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getCompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Compressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="decompressor" type="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getDecompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Decompressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getDefaultExtension" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class creates gzip compressors/decompressors.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.GzipCodec -->
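<!-- Illustrative sketch of writing a plain gzip file through GzipCodec, using
     getDefaultExtension() (".gz") to name the output. The base file name "example"
     and the payload string are assumptions for the example.

import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.GzipCodec;

public class GzipCodecExample {
  public static void main(String[] args) throws IOException {
    GzipCodec codec = new GzipCodec();
    codec.setConf(new Configuration());
    // The result is a standard gzip stream, readable by the gunzip tool.
    String path = "example" + codec.getDefaultExtension();
    CompressionOutputStream out =
        codec.createOutputStream(new FileOutputStream(path));
    byte[] payload = "gzip via the codec API\n".getBytes("UTF-8");
    out.write(payload, 0, payload.length);
    out.close();
  }
}
-->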
|
|
<!-- start class org.apache.hadoop.io.compress.GzipCodec.GzipInputStream -->
|
|
<class name="GzipCodec.GzipInputStream" extends="org.apache.hadoop.io.compress.DecompressorStream"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<constructor name="GzipCodec.GzipInputStream" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<constructor name="GzipCodec.GzipInputStream" type="org.apache.hadoop.io.compress.DecompressorStream"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Allow subclasses to directly set the inflater stream.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="available" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="data" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="skip" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="offset" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="resetState"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.GzipCodec.GzipInputStream -->
|
|
<!-- start class org.apache.hadoop.io.compress.GzipCodec.GzipOutputStream -->
|
|
<class name="GzipCodec.GzipOutputStream" extends="org.apache.hadoop.io.compress.CompressorStream"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<constructor name="GzipCodec.GzipOutputStream" type="java.io.OutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<constructor name="GzipCodec.GzipOutputStream" type="org.apache.hadoop.io.compress.CompressorStream"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Allow subclasses to directly set the deflater stream.
|
|
@param out the Deflater stream to use]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="flush"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="data" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="finish"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="resetState"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A bridge that wraps around a DeflaterOutputStream to make it
|
|
a CompressionOutputStream.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.GzipCodec.GzipOutputStream -->
|
|
<!-- start class org.apache.hadoop.io.compress.LzoCodec -->
|
|
<class name="LzoCodec" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<implements name="org.apache.hadoop.io.compress.CompressionCodec"/>
|
|
<constructor name="LzoCodec"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="isNativeLzoLoaded" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Check if native-lzo library is loaded & initialized.
|
|
|
|
@param conf configuration
|
|
@return <code>true</code> if native-lzo library is loaded & initialized;
|
|
else <code>false</code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="compressor" type="org.apache.hadoop.io.compress.Compressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getCompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Compressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createCompressor" return="org.apache.hadoop.io.compress.Compressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="decompressor" type="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getDecompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Decompressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getDefaultExtension" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the default filename extension for this kind of compression.
|
|
@return the extension including the '.']]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link org.apache.hadoop.io.compress.CompressionCodec} for a streaming
|
|
<b>lzo</b> compression/decompression pair.
|
|
http://www.oberhumer.com/opensource/lzo/]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.LzoCodec -->
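<!-- Illustrative sketch of guarding LzoCodec behind isNativeLzoLoaded(conf), since the
     codec depends on the native-lzo library; the fallback to DefaultCodec is one
     possible choice, not something the API prescribes.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.LzoCodec;

public class LzoGuardExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    CompressionCodec codec;
    if (LzoCodec.isNativeLzoLoaded(conf)) {
      LzoCodec lzo = new LzoCodec();
      lzo.setConf(conf);
      codec = lzo;                      // native-lzo is available
    } else {
      DefaultCodec deflate = new DefaultCodec();
      deflate.setConf(conf);
      codec = deflate;                  // fall back to zlib/deflate
    }
    System.out.println("using " + codec.getClass().getName()
        + ", extension " + codec.getDefaultExtension());
  }
}
-->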
|
|
<!-- start class org.apache.hadoop.io.compress.LzopCodec -->
|
|
<class name="LzopCodec" extends="org.apache.hadoop.io.compress.LzoCodec"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LzopCodec"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="createOutputStream" return="org.apache.hadoop.io.compress.CompressionOutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="compressor" type="org.apache.hadoop.io.compress.Compressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createInputStream" return="org.apache.hadoop.io.compress.CompressionInputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<param name="decompressor" type="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getDefaultExtension" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link org.apache.hadoop.io.compress.CompressionCodec} for a streaming
|
|
<b>lzo</b> compression/decompression pair compatible with lzop.
|
|
http://www.lzop.org/]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.LzopCodec -->
|
|
<!-- start class org.apache.hadoop.io.compress.LzopCodec.LzopDecompressor -->
|
|
<class name="LzopCodec.LzopDecompressor" extends="org.apache.hadoop.io.compress.lzo.LzoDecompressor"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<constructor name="LzopCodec.LzopDecompressor" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create an LzoDecompressor with LZO1X strategy (the only lzo algorithm
|
|
supported by lzop).]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="initHeaderFlags"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dflags" type="java.util.EnumSet<org.apache.hadoop.io.compress.LzopCodec.DChecksum>"/>
|
|
<param name="cflags" type="java.util.EnumSet<org.apache.hadoop.io.compress.LzopCodec.CChecksum>"/>
|
|
<doc>
|
|
<![CDATA[Given a set of decompressed and compressed checksums, initialize the checksums this decompressor will track.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="resetChecksum"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Reset all checksums registered for this decompressor instance.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="verifyDChecksum" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="typ" type="org.apache.hadoop.io.compress.LzopCodec.DChecksum"/>
|
|
<param name="checksum" type="int"/>
|
|
<doc>
|
|
<![CDATA[Given a checksum type, verify its value against that observed in
|
|
decompressed data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="verifyCChecksum" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="typ" type="org.apache.hadoop.io.compress.LzopCodec.CChecksum"/>
|
|
<param name="checksum" type="int"/>
|
|
<doc>
|
|
<![CDATA[Given a checksum type, verify its value against that observed in
|
|
compressed data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="decompress" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.LzopCodec.LzopDecompressor -->
|
|
<!-- start class org.apache.hadoop.io.compress.LzopCodec.LzopInputStream -->
|
|
<class name="LzopCodec.LzopInputStream" extends="org.apache.hadoop.io.compress.BlockDecompressorStream"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<constructor name="LzopCodec.LzopInputStream" type="java.io.InputStream, org.apache.hadoop.io.compress.Decompressor, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="readHeader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read and verify an lzo header, setting relevant block checksum options
|
|
and ignoring most other fields.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.LzopCodec.LzopInputStream -->
|
|
<!-- start class org.apache.hadoop.io.compress.LzopCodec.LzopOutputStream -->
|
|
<class name="LzopCodec.LzopOutputStream" extends="org.apache.hadoop.io.compress.BlockCompressorStream"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<constructor name="LzopCodec.LzopOutputStream" type="java.io.OutputStream, org.apache.hadoop.io.compress.Compressor, int, org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="writeLzopHeader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<param name="strategy" type="org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write an lzop-compatible header to the OutputStream provided.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close the underlying stream and write a null word to the output stream.]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.LzopCodec.LzopOutputStream -->
|
|
</package>
|
|
<package name="org.apache.hadoop.io.compress.bzip2">
|
|
<!-- start interface org.apache.hadoop.io.compress.bzip2.BZip2Constants -->
|
|
<interface name="BZip2Constants" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<field name="baseBlockSize" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="MAX_ALPHA_SIZE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="MAX_CODE_LEN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RUNA" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RUNB" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="N_GROUPS" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="G_SIZE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="N_ITERS" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="MAX_SELECTORS" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="NUM_OVERSHOOT_BYTES" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="rNums" type="int[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This array really shouldn't be here. Again, for historical purposes it
|
|
is.
|
|
|
|
<p>
|
|
FIXME: This array should be in a private or package private location,
|
|
since it could be modified by malicious code.
|
|
</p>]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Base class for both the compress and decompress classes. Holds common arrays,
|
|
and static data.
|
|
<p>
|
|
This interface is public for historical purposes. You should have no need to
|
|
use it.
|
|
</p>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.compress.bzip2.BZip2Constants -->
|
|
<!-- start class org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor -->
|
|
<class name="BZip2DummyCompressor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Compressor"/>
|
|
<constructor name="BZip2DummyCompressor"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compress" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finish"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getBytesRead" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getBytesWritten" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This is a dummy compressor for BZip2.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor -->
|
|
<!-- start class org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor -->
|
|
<class name="BZip2DummyDecompressor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<constructor name="BZip2DummyDecompressor"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="decompress" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="needsDictionary" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This is a dummy decompressor for BZip2.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor -->
|
|
<!-- start class org.apache.hadoop.io.compress.bzip2.CBZip2InputStream -->
|
|
<class name="CBZip2InputStream" extends="java.io.InputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.bzip2.BZip2Constants"/>
|
|
<constructor name="CBZip2InputStream" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Constructs a new CBZip2InputStream which decompresses bytes read from the
|
|
specified stream.
|
|
|
|
<p>
|
|
Although BZip2 headers are marked with the magic <tt>"Bz"</tt> this
|
|
constructor expects the next byte in the stream to be the first one after
|
|
the magic. Thus callers have to skip the first two bytes. Otherwise this
|
|
constructor will throw an exception.
|
|
</p>
|
|
|
|
@throws IOException
|
|
if the stream content is malformed or an I/O error occurs.
|
|
@throws NullPointerException
|
|
if <tt>in == null</tt>]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dest" type="byte[]"/>
|
|
<param name="offs" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An input stream that decompresses from the BZip2 format (without the file
|
|
header chars) to be read as any other stream.
|
|
|
|
<p>
|
|
The decompression requires large amounts of memory. Thus you should call the
|
|
{@link #close() close()} method as soon as possible, to force
|
|
<tt>CBZip2InputStream</tt> to release the allocated memory. See
|
|
{@link CBZip2OutputStream CBZip2OutputStream} for information about memory
|
|
usage.
|
|
</p>
|
|
|
|
<p>
|
|
<tt>CBZip2InputStream</tt> reads bytes from the compressed source stream via
|
|
the single byte {@link java.io.InputStream#read() read()} method exclusively.
|
|
Thus you should consider using a buffered source stream.
|
|
</p>
|
|
|
|
<p>
|
|
Instances of this class are not threadsafe.
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.bzip2.CBZip2InputStream -->
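<!-- Illustrative sketch of the constructor contract described above: the caller must
     consume the two "BZ" magic bytes before wrapping the source stream. The file name
     "data.bz2" is an assumption; a BufferedInputStream is used because the class reads
     the source one byte at a time.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.io.compress.bzip2.CBZip2InputStream;

public class CBZip2ReadExample {
  public static void main(String[] args) throws IOException {
    InputStream raw =
        new BufferedInputStream(new FileInputStream("data.bz2"));
    // Skip the magic so the constructor sees the first byte after it.
    int b1 = raw.read();
    int b2 = raw.read();
    if (b1 != 'B' || b2 != 'Z') {
      raw.close();
      throw new IOException("not a BZip2 stream");
    }
    CBZip2InputStream in = new CBZip2InputStream(raw);
    byte[] buf = new byte[4096];
    int n;
    while ((n = in.read(buf, 0, buf.length)) != -1) {
      System.out.write(buf, 0, n);
    }
    in.close();   // release the large decompression buffers promptly
  }
}
-->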
|
|
<!-- start class org.apache.hadoop.io.compress.bzip2.CBZip2OutputStream -->
|
|
<class name="CBZip2OutputStream" extends="java.io.OutputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.bzip2.BZip2Constants"/>
|
|
<constructor name="CBZip2OutputStream" type="java.io.OutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Constructs a new <tt>CBZip2OutputStream</tt> with a blocksize of 900k.
|
|
|
|
<p>
|
|
<b>Attention: </b>The caller is responsible for writing the two BZip2 magic
|
|
bytes <tt>"BZ"</tt> to the specified stream prior to calling this
|
|
constructor.
|
|
</p>
|
|
|
|
@param out
|
|
the destination stream.
|
|
|
|
@throws IOException
|
|
if an I/O error occurs in the specified stream.
|
|
@throws NullPointerException
|
|
if <code>out == null</code>.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="CBZip2OutputStream" type="java.io.OutputStream, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Constructs a new <tt>CBZip2OutputStream</tt> with specified blocksize.
|
|
|
|
<p>
|
|
<b>Attention: </b>The caller is responsible for writing the two BZip2 magic
|
|
bytes <tt>"BZ"</tt> to the specified stream prior to calling this
|
|
constructor.
|
|
</p>
|
|
|
|
|
|
@param out
|
|
the destination stream.
|
|
@param blockSize
|
|
the blockSize as 100k units.
|
|
|
|
@throws IOException
|
|
if an I/O error occurs in the specified stream.
|
|
@throws IllegalArgumentException
|
|
if <code>(blockSize < 1) || (blockSize > 9)</code>.
|
|
@throws NullPointerException
|
|
if <code>out == null</code>.
|
|
|
|
@see #MIN_BLOCKSIZE
|
|
@see #MAX_BLOCKSIZE]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="hbMakeCodeLengths"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="len" type="char[]"/>
|
|
<param name="freq" type="int[]"/>
|
|
<param name="alphaSize" type="int"/>
|
|
<param name="maxLen" type="int"/>
|
|
<doc>
|
|
<![CDATA[This method is accessible by subclasses for historical purposes. If you
|
|
don't know what it does then you don't need it.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="chooseBlockSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inputLength" type="long"/>
|
|
<doc>
|
|
<![CDATA[Chooses a blocksize based on the given length of the data to compress.
|
|
|
|
@return The blocksize, between {@link #MIN_BLOCKSIZE} and
|
|
{@link #MAX_BLOCKSIZE} both inclusive. For a negative
|
|
<tt>inputLength</tt> this method returns <tt>MAX_BLOCKSIZE</tt>
|
|
always.
|
|
|
|
@param inputLength
|
|
The length of the data which will be compressed by
|
|
<tt>CBZip2OutputStream</tt>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="finalize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="Throwable" type="java.lang.Throwable"/>
|
|
<doc>
|
|
<![CDATA[Overridden to close the stream.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="finish"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="flush"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getBlockSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the blocksize parameter specified at construction time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="byte[]"/>
|
|
<param name="offs" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="MIN_BLOCKSIZE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The minimum supported blocksize <tt> == 1</tt>.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="MAX_BLOCKSIZE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The maximum supported blocksize <tt> == 9</tt>.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="SETMASK" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="CLEARMASK" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="GREATER_ICOST" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="LESSER_ICOST" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="SMALL_THRESH" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="DEPTH_THRESH" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="WORK_FACTOR" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="QSORT_STACK_SIZE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constant is accessible by subclasses for historical purposes. If you
|
|
don't know what it means then you don't need it.
|
|
<p>
|
|
If you are ever unlucky/improbable enough to get a stack overflow whilst
|
|
sorting, increase the following constant and try again. In practice I
|
|
have never seen the stack go above 27 elems, so the following limit seems
|
|
very generous.
|
|
</p>]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An output stream that compresses into the BZip2 format (without the file
|
|
header chars) and writes it to another stream.
|
|
|
|
<p>
|
|
The compression requires large amounts of memory. Thus you should call the
|
|
{@link #close() close()} method as soon as possible, to force
|
|
<tt>CBZip2OutputStream</tt> to release the allocated memory.
|
|
</p>
|
|
|
|
<p>
|
|
You can shrink the amount of allocated memory and maybe raise the compression
|
|
speed by choosing a lower blocksize, which in turn may cause a lower
|
|
compression ratio. You can avoid unnecessary memory allocation by avoiding
|
|
using a blocksize which is bigger than the size of the input.
|
|
</p>
|
|
|
|
<p>
|
|
You can compute the memory usage for compressing by the following formula:
|
|
</p>
|
|
|
|
<pre>
|
|
<code>400k + (9 * blocksize)</code>.
|
|
</pre>
|
|
|
|
<p>
|
|
To get the memory required for decompression by {@link CBZip2InputStream
|
|
CBZip2InputStream} use
|
|
</p>
|
|
|
|
<pre>
|
|
<code>65k + (5 * blocksize)</code>.
|
|
</pre>
|
|
|
|
<table width="100%" border="1">
|
|
<colgroup> <col width="33%" /> <col width="33%" /> <col width="33%" />
|
|
</colgroup>
|
|
<tr>
|
|
<th colspan="3">Memory usage by blocksize</th>
|
|
</tr>
|
|
<tr>
|
|
<th align="right">Blocksize</th> <th align="right">Compression<br>
|
|
memory usage</th> <th align="right">Decompression<br>
|
|
memory usage</th>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">100k</td>
|
|
<td align="right">1300k</td>
|
|
<td align="right">565k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">200k</td>
|
|
<td align="right">2200k</td>
|
|
<td align="right">1065k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">300k</td>
|
|
<td align="right">3100k</td>
|
|
<td align="right">1565k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">400k</td>
|
|
<td align="right">4000k</td>
|
|
<td align="right">2065k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">500k</td>
|
|
<td align="right">4900k</td>
|
|
<td align="right">2565k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">600k</td>
|
|
<td align="right">5800k</td>
|
|
<td align="right">3065k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">700k</td>
|
|
<td align="right">6700k</td>
|
|
<td align="right">3565k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">800k</td>
|
|
<td align="right">7600k</td>
|
|
<td align="right">4065k</td>
|
|
</tr>
|
|
<tr>
|
|
<td align="right">900k</td>
|
|
<td align="right">8500k</td>
|
|
<td align="right">4565k</td>
|
|
</tr>
|
|
</table>
|
|
|
|
<p>
|
|
For decompression <tt>CBZip2InputStream</tt> allocates less memory if the
|
|
bzipped input is smaller than one block.
|
|
</p>
|
|
|
|
<p>
|
|
Instances of this class are not threadsafe.
|
|
</p>
|
|
|
|
<p>
|
|
TODO: Update to BZip2 1.0.1
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.bzip2.CBZip2OutputStream -->
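  <!-- Illustrative sketch (hedged): applying the memory-usage formulas quoted in the
       CBZip2OutputStream documentation above. The 900k figure is simply the largest
       block size from the table; every name below is local to this sketch.

       long blockSize = 900L * 1024;                          // example block size
       long compressMemory = 400L * 1024 + 9 * blockSize;     // 400k + (9 * blocksize)
       long decompressMemory = 65L * 1024 + 5 * blockSize;    // 65k + (5 * blocksize)
       System.out.println(compressMemory + " / " + decompressMemory + " bytes");
  -->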
|
|
</package>
|
|
<package name="org.apache.hadoop.io.compress.lzo">
|
|
<!-- start class org.apache.hadoop.io.compress.lzo.LzoCompressor -->
|
|
<class name="LzoCompressor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Compressor"/>
|
|
<constructor name="LzoCompressor" type="org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new compressor using the specified {@link CompressionStrategy}.
|
|
|
|
@param strategy lzo compression algorithm to use
|
|
@param directBufferSize size of the direct buffer to be used.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="LzoCompressor"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new compressor with the default lzo1x_1 compression.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="isNativeLzoLoaded" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Check if lzo compressors are loaded and initialized.
|
|
|
|
@return <code>true</code> if lzo compressors are loaded & initialized,
|
|
else <code>false</code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="finish"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compress" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getBytesRead" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return number of bytes given to this compressor since last reset.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesWritten" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return number of bytes consumed by callers of compress since last reset.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Noop.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LZO_LIBRARY_VERSION" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A {@link Compressor} based on the lzo algorithm.
|
|
http://www.oberhumer.com/opensource/lzo/]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.lzo.LzoCompressor -->
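  <!-- Illustrative sketch (hedged, not part of the generated API record): the usual call
       sequence for the Compressor methods listed above. The buffer names and the naive
       output sizing are assumptions of this sketch, not part of the API.

       import java.io.IOException;
       import java.util.Arrays;
       import org.apache.hadoop.io.compress.lzo.LzoCompressor;

       public class LzoCompressSketch {
         public static byte[] compress(byte[] raw) throws IOException {
           if (!LzoCompressor.isNativeLzoLoaded()) {
             throw new IOException("native-lzo library is not loaded");
           }
           LzoCompressor compressor = new LzoCompressor();  // default lzo1x_1 strategy
           compressor.setInput(raw, 0, raw.length);
           compressor.finish();
           byte[] out = new byte[raw.length * 2 + 64];      // naive output sizing
           int produced = 0;
           while (!compressor.finished()) {
             produced += compressor.compress(out, produced, out.length - produced);
           }
           compressor.end();                                // documented above as a no-op
           return Arrays.copyOf(out, produced);
         }
       }
  -->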
|
|
<!-- start class org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy -->
|
|
<class name="LzoCompressor.CompressionStrategy" extends="java.lang.Enum<org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The compression algorithm for lzo library.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.lzo.LzoCompressor.CompressionStrategy -->
|
|
<!-- start class org.apache.hadoop.io.compress.lzo.LzoDecompressor -->
|
|
<class name="LzoDecompressor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<constructor name="LzoDecompressor" type="org.apache.hadoop.io.compress.lzo.LzoDecompressor.CompressionStrategy, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new lzo decompressor.
|
|
|
|
@param strategy lzo decompression algorithm
|
|
@param directBufferSize size of the direct-buffer]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="LzoDecompressor"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new lzo decompressor.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="isNativeLzoLoaded" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Check if lzo decompressors are loaded and initialized.
|
|
|
|
@return <code>true</code> if lzo decompressors are loaded & initialized,
|
|
else <code>false</code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="needsDictionary" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="decompress" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finalize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="LZO_LIBRARY_VERSION" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A {@link Decompressor} based on the lzo algorithm.
|
|
http://www.oberhumer.com/opensource/lzo/]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.lzo.LzoDecompressor -->
|
|
<!-- start class org.apache.hadoop.io.compress.lzo.LzoDecompressor.CompressionStrategy -->
|
|
<class name="LzoDecompressor.CompressionStrategy" extends="java.lang.Enum<org.apache.hadoop.io.compress.lzo.LzoDecompressor.CompressionStrategy>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.io.compress.lzo.LzoDecompressor.CompressionStrategy[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.io.compress.lzo.LzoDecompressor.CompressionStrategy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.lzo.LzoDecompressor.CompressionStrategy -->
|
|
</package>
|
|
<package name="org.apache.hadoop.io.compress.zlib">
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater -->
|
|
<class name="BuiltInZlibDeflater" extends="java.util.zip.Deflater"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Compressor"/>
|
|
<constructor name="BuiltInZlibDeflater" type="int, boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="BuiltInZlibDeflater" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="BuiltInZlibDeflater"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="compress" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A wrapper around java.util.zip.Deflater to make it conform
|
|
to org.apache.hadoop.io.compress.Compressor interface.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater -->
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater -->
|
|
<class name="BuiltInZlibInflater" extends="java.util.zip.Inflater"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<constructor name="BuiltInZlibInflater" type="boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="BuiltInZlibInflater"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="decompress" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A wrapper around java.util.zip.Inflater to make it conform
|
|
to org.apache.hadoop.io.compress.Decompressor interface.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater -->
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.ZlibCompressor -->
|
|
<class name="ZlibCompressor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Compressor"/>
|
|
<constructor name="ZlibCompressor" type="org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel, org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy, org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new compressor using the specified compression level.
|
|
Compressed data will be generated in ZLIB format.
|
|
|
|
@param level Compression level #CompressionLevel
|
|
@param strategy Compression strategy #CompressionStrategy
|
|
@param header Compression header #CompressionHeader
|
|
@param directBufferSize Size of the direct buffer to be used.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ZlibCompressor"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new compressor with the default compression level.
|
|
Compressed data will be generated in ZLIB format.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finish"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compress" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getBytesWritten" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the total number of compressed bytes output so far.
|
|
|
|
@return the total (non-negative) number of compressed bytes output so far]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesRead" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the total number of uncompressed bytes input so far.
|
|
|
|
@return the total (non-negative) number of uncompressed bytes input so far]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Compressor} based on the popular
|
|
zlib compression algorithm.
|
|
http://www.zlib.net/]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.ZlibCompressor -->
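  <!-- Illustrative sketch (hedged): constructing a ZlibCompressor with the four-argument
       constructor listed above. The enum constants DEFAULT_COMPRESSION, DEFAULT_STRATEGY
       and DEFAULT_HEADER are assumed values of CompressionLevel, CompressionStrategy and
       CompressionHeader; they are not spelled out in this file.

       import org.apache.hadoop.io.compress.zlib.ZlibCompressor;

       ZlibCompressor zlib = new ZlibCompressor(
           ZlibCompressor.CompressionLevel.DEFAULT_COMPRESSION,
           ZlibCompressor.CompressionStrategy.DEFAULT_STRATEGY,
           ZlibCompressor.CompressionHeader.DEFAULT_HEADER,   // emits ZLIB-format data
           64 * 1024);                                        // direct buffer size, bytes
  -->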
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader -->
|
|
<class name="ZlibCompressor.CompressionHeader" extends="java.lang.Enum<org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<method name="windowBits" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The type of header for compressed data.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader -->
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel -->
|
|
<class name="ZlibCompressor.CompressionLevel" extends="java.lang.Enum<org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The compression level for zlib library.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel -->
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy -->
|
|
<class name="ZlibCompressor.CompressionStrategy" extends="java.lang.Enum<org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The compression strategy for zlib library.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy -->
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.ZlibDecompressor -->
|
|
<class name="ZlibDecompressor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.compress.Decompressor"/>
|
|
<constructor name="ZlibDecompressor" type="org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new decompressor.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ZlibDecompressor"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="setDictionary"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="needsInput" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="needsDictionary" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finished" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="decompress" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getBytesWritten" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the total number of compressed bytes output so far.
|
|
|
|
@return the total (non-negative) number of compressed bytes output so far]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBytesRead" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the total number of uncompressed bytes input so far.
|
|
|
|
@return the total (non-negative) number of uncompressed bytes input so far]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="end"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="finalize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Decompressor} based on the popular
|
|
zlib compression algorithm.
|
|
http://www.zlib.net/]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.ZlibDecompressor -->
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader -->
|
|
<class name="ZlibDecompressor.CompressionHeader" extends="java.lang.Enum<org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<method name="windowBits" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The headers to detect from compressed data.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader -->
|
|
<!-- start class org.apache.hadoop.io.compress.zlib.ZlibFactory -->
|
|
<class name="ZlibFactory" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ZlibFactory"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="isNativeZlibLoaded" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Check if native-zlib code is loaded & initialized correctly and
|
|
can be loaded for this job.
|
|
|
|
@param conf configuration
|
|
@return <code>true</code> if native-zlib is loaded & initialized
|
|
and can be loaded for this job, else <code>false</code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getZlibCompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Compressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Return the appropriate type of the zlib compressor.
|
|
|
|
@param conf configuration
|
|
@return the appropriate type of the zlib compressor.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getZlibCompressor" return="org.apache.hadoop.io.compress.Compressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Return the appropriate implementation of the zlib compressor.
|
|
|
|
@param conf configuration
|
|
@return the appropriate implementation of the zlib compressor.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getZlibDecompressorType" return="java.lang.Class<? extends org.apache.hadoop.io.compress.Decompressor>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Return the appropriate type of the zlib decompressor.
|
|
|
|
@param conf configuration
|
|
@return the appropriate type of the zlib decompressor.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getZlibDecompressor" return="org.apache.hadoop.io.compress.Decompressor"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Return the appropriate implementation of the zlib decompressor.
|
|
|
|
@param conf configuration
|
|
@return the appropriate implementation of the zlib decompressor.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A collection of factories to create the right
|
|
zlib/gzip compressor/decompressor instances.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.compress.zlib.ZlibFactory -->
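  <!-- Illustrative sketch (hedged): obtaining zlib codec instances through the factory
       methods listed above. Whether the native or the built-in implementation is returned
       depends on the configuration and on the native library being loaded.

       import org.apache.hadoop.conf.Configuration;
       import org.apache.hadoop.io.compress.Compressor;
       import org.apache.hadoop.io.compress.Decompressor;
       import org.apache.hadoop.io.compress.zlib.ZlibFactory;

       Configuration conf = new Configuration();
       boolean nativeZlib = ZlibFactory.isNativeZlibLoaded(conf);
       Compressor compressor = ZlibFactory.getZlibCompressor(conf);
       Decompressor decompressor = ZlibFactory.getZlibDecompressor(conf);
  -->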
|
|
</package>
|
|
<package name="org.apache.hadoop.io.retry">
|
|
<!-- start class org.apache.hadoop.io.retry.RetryPolicies -->
|
|
<class name="RetryPolicies" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RetryPolicies"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="retryUpToMaximumCountWithFixedSleep" return="org.apache.hadoop.io.retry.RetryPolicy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="maxRetries" type="int"/>
|
|
<param name="sleepTime" type="long"/>
|
|
<param name="timeUnit" type="java.util.concurrent.TimeUnit"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Keep trying a limited number of times, waiting a fixed time between attempts,
|
|
and then fail by re-throwing the exception.
|
|
</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="retryUpToMaximumTimeWithFixedSleep" return="org.apache.hadoop.io.retry.RetryPolicy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="maxTime" type="long"/>
|
|
<param name="sleepTime" type="long"/>
|
|
<param name="timeUnit" type="java.util.concurrent.TimeUnit"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Keep trying for a maximum time, waiting a fixed time between attempts,
|
|
and then fail by re-throwing the exception.
|
|
</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="retryUpToMaximumCountWithProportionalSleep" return="org.apache.hadoop.io.retry.RetryPolicy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="maxRetries" type="int"/>
|
|
<param name="sleepTime" type="long"/>
|
|
<param name="timeUnit" type="java.util.concurrent.TimeUnit"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Keep trying a limited number of times, waiting a growing amount of time between attempts,
|
|
and then fail by re-throwing the exception.
|
|
The time between attempts is <code>sleepTime</code> multiplied by the number of tries so far.
|
|
</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="exponentialBackoffRetry" return="org.apache.hadoop.io.retry.RetryPolicy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="maxRetries" type="int"/>
|
|
<param name="sleepTime" type="long"/>
|
|
<param name="timeUnit" type="java.util.concurrent.TimeUnit"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Keep trying a limited number of times, waiting a growing amount of time between attempts,
|
|
and then fail by re-throwing the exception.
|
|
The time between attempts is <code>sleepTime</code> multiplied by a random
number in the range [0, 2 raised to the number of retries).
|
|
</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="retryByException" return="org.apache.hadoop.io.retry.RetryPolicy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="defaultPolicy" type="org.apache.hadoop.io.retry.RetryPolicy"/>
|
|
<param name="exceptionToPolicyMap" type="java.util.Map<java.lang.Class<? extends java.lang.Exception>, org.apache.hadoop.io.retry.RetryPolicy>"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Set a default policy with some explicit handlers for specific exceptions.
|
|
</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="retryByRemoteException" return="org.apache.hadoop.io.retry.RetryPolicy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="defaultPolicy" type="org.apache.hadoop.io.retry.RetryPolicy"/>
|
|
<param name="exceptionToPolicyMap" type="java.util.Map<java.lang.Class<? extends java.lang.Exception>, org.apache.hadoop.io.retry.RetryPolicy>"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A retry policy for RemoteException.
|
|
Set a default policy with some explicit handlers for specific exceptions.
|
|
</p>]]>
|
|
</doc>
|
|
</method>
|
|
<field name="TRY_ONCE_THEN_FAIL" type="org.apache.hadoop.io.retry.RetryPolicy"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Try once, and fail by re-throwing the exception.
|
|
This corresponds to having no retry mechanism in place.
|
|
</p>]]>
|
|
</doc>
|
|
</field>
|
|
<field name="TRY_ONCE_DONT_FAIL" type="org.apache.hadoop.io.retry.RetryPolicy"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Try once, and fail silently for <code>void</code> methods, or by
|
|
re-throwing the exception for non-<code>void</code> methods.
|
|
</p>]]>
|
|
</doc>
|
|
</field>
|
|
<field name="RETRY_FOREVER" type="org.apache.hadoop.io.retry.RetryPolicy"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Keep trying forever.
|
|
</p>]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A collection of useful implementations of {@link RetryPolicy}.
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.retry.RetryPolicies -->
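  <!-- Illustrative sketch (hedged): building policies with the factory methods listed
       above. The concrete retry counts and sleep times are arbitrary example values.

       import java.util.concurrent.TimeUnit;
       import org.apache.hadoop.io.retry.RetryPolicies;
       import org.apache.hadoop.io.retry.RetryPolicy;

       // Retry up to 5 times, sleeping 2 seconds between attempts, then re-throw.
       RetryPolicy fixedSleep =
           RetryPolicies.retryUpToMaximumCountWithFixedSleep(5, 2, TimeUnit.SECONDS);
       // Exponential backoff: the sleep time grows with a random factor per retry.
       RetryPolicy backoff =
           RetryPolicies.exponentialBackoffRetry(5, 500, TimeUnit.MILLISECONDS);
  -->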
|
|
<!-- start interface org.apache.hadoop.io.retry.RetryPolicy -->
|
|
<interface name="RetryPolicy" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="shouldRetry" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="e" type="java.lang.Exception"/>
|
|
<param name="retries" type="int"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Determines whether the framework should retry a method, given the
exception that caused it to fail and the number of retries already
attempted for that operation.
|
|
</p>
|
|
@param e The exception that caused the method to fail.
|
|
@param retries The number of times the method has been retried.
|
|
@return <code>true</code> if the method should be retried,
|
|
<code>false</code> if the method should not be retried
|
|
but shouldn't fail with an exception (only for void methods).
|
|
@throws Exception The re-thrown exception <code>e</code> indicating
|
|
that the method failed and should not be retried further.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Specifies a policy for retrying method failures.
|
|
Implementations of this interface should be immutable.
|
|
</p>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.retry.RetryPolicy -->
|
|
<!-- start class org.apache.hadoop.io.retry.RetryProxy -->
|
|
<class name="RetryProxy" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RetryProxy"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="create" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="iface" type="java.lang.Class<?>"/>
|
|
<param name="implementation" type="java.lang.Object"/>
|
|
<param name="retryPolicy" type="org.apache.hadoop.io.retry.RetryPolicy"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Create a proxy for an interface of an implementation class
|
|
using the same retry policy for each method in the interface.
|
|
</p>
|
|
@param iface the interface that the retry will implement
|
|
@param implementation the instance whose methods should be retried
|
|
@param retryPolicy the policy for retrying method call failures
|
|
@return the retry proxy]]>
|
|
</doc>
|
|
</method>
|
|
<method name="create" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="iface" type="java.lang.Class<?>"/>
|
|
<param name="implementation" type="java.lang.Object"/>
|
|
<param name="methodNameToPolicyMap" type="java.util.Map<java.lang.String, org.apache.hadoop.io.retry.RetryPolicy>"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Create a proxy for an interface of an implementation class
|
|
using a set of retry policies specified by method name.
|
|
If no retry policy is defined for a method then a default of
|
|
{@link RetryPolicies#TRY_ONCE_THEN_FAIL} is used.
|
|
</p>
|
|
@param iface the interface that the retry will implement
|
|
@param implementation the instance whose methods should be retried
|
|
@param methodNameToPolicyMap a map of method names to retry policies
|
|
@return the retry proxy]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A factory for creating retry proxies.
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.retry.RetryProxy -->
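  <!-- Illustrative sketch (hedged): wrapping an implementation so that every interface
       method is retried with the same policy. ClientProtocol and rawClient are
       hypothetical names used only for this example.

       RetryPolicy policy =
           RetryPolicies.retryUpToMaximumCountWithFixedSleep(3, 1, TimeUnit.SECONDS);
       ClientProtocol retrying =
           (ClientProtocol) RetryProxy.create(ClientProtocol.class, rawClient, policy);
  -->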
|
|
</package>
|
|
<package name="org.apache.hadoop.io.serializer">
|
|
<!-- start interface org.apache.hadoop.io.serializer.Deserializer -->
|
|
<interface name="Deserializer" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="open"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.InputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>Prepare the deserializer for reading.</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="deserialize" return="T"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="t" type="T"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Deserialize the next object from the underlying input stream.
|
|
If the object <code>t</code> is non-null then this deserializer
|
|
<i>may</i> set its internal state to the next object read from the input
|
|
stream. Otherwise, if the object <code>t</code> is null a new
|
|
deserialized object will be created.
|
|
</p>
|
|
@return the deserialized object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>Close the underlying input stream and clear up any resources.</p>]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Provides a facility for deserializing objects of type <T> from an
|
|
{@link InputStream}.
|
|
</p>
|
|
|
|
<p>
|
|
Deserializers are stateful, but must not buffer the input since
|
|
other producers may read from the input between calls to
|
|
{@link #deserialize(Object)}.
|
|
</p>
|
|
@param <T>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.serializer.Deserializer -->
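  <!-- Illustrative sketch (hedged): the reuse contract described in the Deserializer
       documentation above. "conf" and "in" are hypothetical; Text is used only as an
       example class supported by the default serializations.

       Deserializer<Text> deserializer =
           new SerializationFactory(conf).getDeserializer(Text.class);
       deserializer.open(in);                 // "in" is some java.io.InputStream
       Text reuse = new Text();
       // May fill in and return "reuse", or return a freshly created object.
       Text record = deserializer.deserialize(reuse);
       deserializer.close();
  -->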
|
|
<!-- start class org.apache.hadoop.io.serializer.DeserializerComparator -->
|
|
<class name="DeserializerComparator" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.RawComparator<T>"/>
|
|
<constructor name="DeserializerComparator" type="org.apache.hadoop.io.serializer.Deserializer<T>"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A {@link RawComparator} that uses a {@link Deserializer} to deserialize
|
|
the objects to be compared so that the standard {@link Comparator} can
|
|
be used to compare them.
|
|
</p>
|
|
<p>
|
|
One may optimize compare-intensive operations by using a custom
|
|
implementation of {@link RawComparator} that operates directly
|
|
on byte representations.
|
|
</p>
|
|
@param <T>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.serializer.DeserializerComparator -->
|
|
<!-- start class org.apache.hadoop.io.serializer.JavaSerialization -->
|
|
<class name="JavaSerialization" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.serializer.Serialization<java.io.Serializable>"/>
|
|
<constructor name="JavaSerialization"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<?>"/>
|
|
</method>
|
|
<method name="getDeserializer" return="org.apache.hadoop.io.serializer.Deserializer<java.io.Serializable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<java.io.Serializable>"/>
|
|
</method>
|
|
<method name="getSerializer" return="org.apache.hadoop.io.serializer.Serializer<java.io.Serializable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<java.io.Serializable>"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
An experimental {@link Serialization} for Java {@link Serializable} classes.
|
|
</p>
|
|
@see JavaSerializationComparator]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.serializer.JavaSerialization -->
|
|
<!-- start class org.apache.hadoop.io.serializer.JavaSerializationComparator -->
|
|
<class name="JavaSerializationComparator" extends="org.apache.hadoop.io.serializer.DeserializerComparator<T>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JavaSerializationComparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o1" type="T extends java.io.Serializable & java.lang.Comparable<T>"/>
|
|
<param name="o2" type="T extends java.io.Serializable & java.lang.Comparable<T>"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A {@link RawComparator} that uses a {@link JavaSerialization}
|
|
{@link Deserializer} to deserialize objects that are then compared via
|
|
their {@link Comparable} interfaces.
|
|
</p>
|
|
@param <T>
|
|
@see JavaSerialization]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.serializer.JavaSerializationComparator -->
|
|
<!-- start interface org.apache.hadoop.io.serializer.Serialization -->
|
|
<interface name="Serialization" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Allows clients to test whether this {@link Serialization}
|
|
supports the given class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSerializer" return="org.apache.hadoop.io.serializer.Serializer<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<T>"/>
|
|
<doc>
|
|
<![CDATA[@return a {@link Serializer} for the given class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDeserializer" return="org.apache.hadoop.io.serializer.Deserializer<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<T>"/>
|
|
<doc>
|
|
<![CDATA[@return a {@link Deserializer} for the given class.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Encapsulates a {@link Serializer}/{@link Deserializer} pair.
|
|
</p>
|
|
@param <T>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.serializer.Serialization -->
|
|
<!-- start class org.apache.hadoop.io.serializer.SerializationFactory -->
|
|
<class name="SerializationFactory" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SerializationFactory" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Serializations are found by reading the <code>io.serializations</code>
|
|
property from <code>conf</code>, which is a comma-delimited list of
|
|
classnames.
|
|
</p>]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getSerializer" return="org.apache.hadoop.io.serializer.Serializer<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<T>"/>
|
|
</method>
|
|
<method name="getDeserializer" return="org.apache.hadoop.io.serializer.Deserializer<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<T>"/>
|
|
</method>
|
|
<method name="getSerialization" return="org.apache.hadoop.io.serializer.Serialization<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<T>"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
A factory for {@link Serialization}s.
|
|
</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.serializer.SerializationFactory -->
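  <!-- Illustrative sketch (hedged): resolving a Serializer through the factory, which
       reads the comma-delimited "io.serializations" classnames from the configuration.
       Text and "out" are example names only.

       import org.apache.hadoop.conf.Configuration;
       import org.apache.hadoop.io.Text;
       import org.apache.hadoop.io.serializer.SerializationFactory;
       import org.apache.hadoop.io.serializer.Serializer;

       Configuration conf = new Configuration();
       SerializationFactory factory = new SerializationFactory(conf);
       Serializer<Text> serializer = factory.getSerializer(Text.class);
       serializer.open(out);                  // "out" is some java.io.OutputStream
       serializer.serialize(new Text("example record"));
       serializer.close();
  -->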
|
|
<!-- start interface org.apache.hadoop.io.serializer.Serializer -->
|
|
<interface name="Serializer" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="open"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.OutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>Prepare the serializer for writing.</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="serialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="t" type="T"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>Serialize <code>t</code> to the underlying output stream.</p>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<p>Close the underlying output stream and clear up any resources.</p>]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<p>
|
|
Provides a facility for serializing objects of type <T> to an
|
|
{@link OutputStream}.
|
|
</p>
|
|
|
|
<p>
|
|
Serializers are stateful, but must not buffer the output since
|
|
other producers may write to the output between calls to
|
|
{@link #serialize(Object)}.
|
|
</p>
|
|
@param <T>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.io.serializer.Serializer -->
|
|
<!-- start class org.apache.hadoop.io.serializer.WritableSerialization -->
|
|
<class name="WritableSerialization" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.serializer.Serialization<org.apache.hadoop.io.Writable>"/>
|
|
<constructor name="WritableSerialization"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<?>"/>
|
|
</method>
|
|
<method name="getDeserializer" return="org.apache.hadoop.io.serializer.Deserializer<org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<org.apache.hadoop.io.Writable>"/>
|
|
</method>
|
|
<method name="getSerializer" return="org.apache.hadoop.io.serializer.Serializer<org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<org.apache.hadoop.io.Writable>"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Serialization} for {@link Writable}s that delegates to
|
|
{@link Writable#write(java.io.DataOutput)} and
|
|
{@link Writable#readFields(java.io.DataInput)}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.io.serializer.WritableSerialization -->
|
|
</package>
|
|
<package name="org.apache.hadoop.ipc">
|
|
<!-- start class org.apache.hadoop.ipc.Client -->
|
|
<class name="Client" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Client" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>, org.apache.hadoop.conf.Configuration, javax.net.SocketFactory"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct an IPC client whose values are of the given {@link Writable}
|
|
class.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Client" type="java.lang.Class<? extends org.apache.hadoop.io.Writable>, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct an IPC client with the default SocketFactory.

@param valueClass class of the call values
@param conf configuration]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setPingInterval"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="pingInterval" type="int"/>
|
|
<doc>
|
|
<![CDATA[set the ping interval value in configuration
|
|
|
|
@param conf Configuration
|
|
@param pingInterval the ping interval]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stop"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Stop all threads related to this client. No further calls may be made
|
|
using this client.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="call" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="param" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="address" type="java.net.InetSocketAddress"/>
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Make a call, passing <code>param</code>, to the IPC server running at
|
|
<code>address</code>, returning the value. Throws exceptions if there are
|
|
network problems or if the remote code threw an exception.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="call" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="param" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="addr" type="java.net.InetSocketAddress"/>
|
|
<param name="ticket" type="org.apache.hadoop.security.UserGroupInformation"/>
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="call" return="org.apache.hadoop.io.Writable[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="params" type="org.apache.hadoop.io.Writable[]"/>
|
|
<param name="addresses" type="java.net.InetSocketAddress[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Makes a set of calls in parallel. Each parameter is sent to the
|
|
corresponding address. When all values are available, or have timed out
|
|
or errored, the collected results are returned in an array. The array
|
|
contains nulls for calls that timed out or errored.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A client for an IPC service. IPC calls take a single {@link Writable} as a
|
|
parameter, and return a {@link Writable} as their value. A service runs on
|
|
a port and is defined by a parameter class and a value class.
|
|
|
|
@see Server]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.ipc.Client -->
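<!-- A minimal, hypothetical usage sketch of the Client class documented above. The host,
     port, and request payload are placeholders; the server side must accept the same
     Writable parameter class.

     import java.net.InetSocketAddress;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.io.ObjectWritable;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.io.Writable;
     import org.apache.hadoop.ipc.Client;

     public class ClientSketch {
       public static void main(String[] args) throws Exception {
         Configuration conf = new Configuration();
         // Responses will be deserialized as ObjectWritable values.
         Client client = new Client(ObjectWritable.class, conf);
         InetSocketAddress addr = new InetSocketAddress("server.example.com", 9000);
         // Send a single Writable request and block for the reply.
         Writable reply = client.call(new Text("ping"), addr);
         System.out.println(reply);
         client.stop();   // release the client's background threads
       }
     }
-->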
|
|
<!-- start class org.apache.hadoop.ipc.RemoteException -->
|
|
<class name="RemoteException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RemoteException" type="java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getClassName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="unwrapRemoteException" return="java.io.IOException"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="lookupTypes" type="java.lang.Class[]"/>
|
|
<doc>
|
|
<![CDATA[If this remote exception wraps up one of the lookupTypes
|
|
then unwrap and return it; otherwise return this exception.
|
|
<p>
|
|
Unwraps any IOException.
|
|
|
|
@param lookupTypes the desired exception classes.
|
|
@return IOException, which is either the lookupClass exception or this.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unwrapRemoteException" return="java.io.IOException"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Instantiate and return the exception wrapped up by this remote exception.
|
|
|
|
<p> This unwraps any <code>Throwable</code> that has a constructor taking
|
|
a <code>String</code> as a parameter.
|
|
Otherwise it returns this.
|
|
|
|
@return <code>Throwable</code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeXml"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="java.lang.String"/>
|
|
<param name="doc" type="org.znerd.xmlenc.XMLOutputter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write the object to XML format]]>
|
|
</doc>
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.ipc.RemoteException"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="attrs" type="org.xml.sax.Attributes"/>
|
|
<doc>
|
|
<![CDATA[Create RemoteException from attributes]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.ipc.RemoteException -->
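<!-- A hypothetical sketch of handling a RemoteException on the client side; the
     namenode.rename(...) call is a placeholder for any Hadoop RPC invocation.

     try {
       namenode.rename(src, dst);                 // some RPC call that may fail remotely
     } catch (org.apache.hadoop.ipc.RemoteException re) {
       // getClassName() reports the server-side exception class that was wrapped.
       System.err.println("server threw " + re.getClassName());
       // unwrapRemoteException() re-creates the original exception when it has a
       // String constructor; otherwise the RemoteException itself is returned.
       throw re.unwrapRemoteException();
     }
-->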
|
|
<!-- start class org.apache.hadoop.ipc.RPC -->
|
|
<class name="RPC" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="waitForProxy" return="org.apache.hadoop.ipc.VersionedProtocol"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.Class"/>
|
|
<param name="clientVersion" type="long"/>
|
|
<param name="addr" type="java.net.InetSocketAddress"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getProxy" return="org.apache.hadoop.ipc.VersionedProtocol"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.Class<?>"/>
|
|
<param name="clientVersion" type="long"/>
|
|
<param name="addr" type="java.net.InetSocketAddress"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="factory" type="javax.net.SocketFactory"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a client-side proxy object that implements the named protocol,
|
|
talking to a server at the named address.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProxy" return="org.apache.hadoop.ipc.VersionedProtocol"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.Class<?>"/>
|
|
<param name="clientVersion" type="long"/>
|
|
<param name="addr" type="java.net.InetSocketAddress"/>
|
|
<param name="ticket" type="org.apache.hadoop.security.UserGroupInformation"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="factory" type="javax.net.SocketFactory"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a client-side proxy object that implements the named protocol,
|
|
talking to a server at the named address.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProxy" return="org.apache.hadoop.ipc.VersionedProtocol"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.Class<?>"/>
|
|
<param name="clientVersion" type="long"/>
|
|
<param name="addr" type="java.net.InetSocketAddress"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a client-side proxy object with the default SocketFactory
|
|
|
|
@param protocol
|
|
@param clientVersion
|
|
@param addr
|
|
@param conf
|
|
@return a proxy instance
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stopProxy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="proxy" type="org.apache.hadoop.ipc.VersionedProtocol"/>
|
|
<doc>
|
|
<![CDATA[Stop this proxy and release its invoker's resources
|
|
@param proxy the proxy to be stopped]]>
|
|
</doc>
|
|
</method>
|
|
<method name="call" return="java.lang.Object[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="method" type="java.lang.reflect.Method"/>
|
|
<param name="params" type="java.lang.Object[][]"/>
|
|
<param name="addrs" type="java.net.InetSocketAddress[]"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Expert: Make multiple, parallel calls to a set of servers.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getServer" return="org.apache.hadoop.ipc.RPC.Server"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="instance" type="java.lang.Object"/>
|
|
<param name="bindAddress" type="java.lang.String"/>
|
|
<param name="port" type="int"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a server for a protocol implementation instance listening on a
|
|
port and address.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getServer" return="org.apache.hadoop.ipc.RPC.Server"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="instance" type="java.lang.Object"/>
|
|
<param name="bindAddress" type="java.lang.String"/>
|
|
<param name="port" type="int"/>
|
|
<param name="numHandlers" type="int"/>
|
|
<param name="verbose" type="boolean"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a server for a protocol implementation instance listening on a
|
|
port and address.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A simple RPC mechanism.
|
|
|
|
A <i>protocol</i> is a Java interface. All parameters and return types must
|
|
be one of:
|
|
|
|
<ul> <li>a primitive type, <code>boolean</code>, <code>byte</code>,
|
|
<code>char</code>, <code>short</code>, <code>int</code>, <code>long</code>,
|
|
<code>float</code>, <code>double</code>, or <code>void</code>; or</li>
|
|
|
|
<li>a {@link String}; or</li>
|
|
|
|
<li>a {@link Writable}; or</li>
|
|
|
|
<li>an array of the above types</li> </ul>
|
|
|
|
All methods in the protocol should throw only IOException. No field data of
|
|
the protocol instance is transmitted.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.ipc.RPC -->
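<!-- A hypothetical client-side sketch for the RPC class documented above. PingProtocol
     is an invented protocol interface; real protocols must extend VersionedProtocol and
     declare a static final long versionID, and all parameter/return types must follow
     the rules listed in the class description.

     public interface PingProtocol extends org.apache.hadoop.ipc.VersionedProtocol {
       long versionID = 1L;
       org.apache.hadoop.io.Text ping(org.apache.hadoop.io.Text message)
           throws java.io.IOException;
     }

     // Obtain a proxy talking to a server at the given address, call it, then release it.
     PingProtocol proxy = (PingProtocol) RPC.getProxy(
         PingProtocol.class, PingProtocol.versionID,
         new java.net.InetSocketAddress("server.example.com", 9000), conf);
     org.apache.hadoop.io.Text reply = proxy.ping(new org.apache.hadoop.io.Text("hello"));
     RPC.stopProxy(proxy);
-->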
|
|
<!-- start class org.apache.hadoop.ipc.RPC.Server -->
|
|
<class name="RPC.Server" extends="org.apache.hadoop.ipc.Server"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RPC.Server" type="java.lang.Object, org.apache.hadoop.conf.Configuration, java.lang.String, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct an RPC server.
|
|
@param instance the instance whose methods will be called
|
|
@param conf the configuration to use
|
|
@param bindAddress the address to bind to and listen on for connections
|
|
@param port the port to listen for connections on]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="RPC.Server" type="java.lang.Object, org.apache.hadoop.conf.Configuration, java.lang.String, int, int, boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct an RPC server.
|
|
@param instance the instance whose methods will be called
|
|
@param conf the configuration to use
|
|
@param bindAddress the address to bind to and listen on for connections
|
|
@param port the port to listen for connections on
|
|
@param numHandlers the number of method handler threads to run
|
|
@param verbose whether each call should be logged]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="call" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="param" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="receivedTime" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An RPC Server.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.ipc.RPC.Server -->
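<!-- A matching hypothetical server-side sketch: RPC.getServer() wraps a protocol
     implementation in an RPC.Server (PingProtocol is the invented interface from the
     sketch after the RPC class above).

     PingProtocol impl = new PingProtocol() {
       public org.apache.hadoop.io.Text ping(org.apache.hadoop.io.Text message) {
         return message;                          // trivially echo the request
       }
       public long getProtocolVersion(String protocol, long clientVersion) {
         return PingProtocol.versionID;
       }
     };
     RPC.Server server = RPC.getServer(impl, "0.0.0.0", 9000, conf);
     server.start();                              // begin accepting calls
     server.join();                               // block until stop() is called
-->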
|
|
<!-- start class org.apache.hadoop.ipc.RPC.VersionMismatch -->
|
|
<class name="RPC.VersionMismatch" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RPC.VersionMismatch" type="java.lang.String, long, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a version mismatch exception
|
|
@param interfaceName the name of the mismatched protocol
|
|
@param clientVersion the client's version of the protocol
|
|
@param serverVersion the server's version of the protocol]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getInterfaceName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the interface name
|
|
@return the java class name
|
|
(e.g. org.apache.hadoop.mapred.InterTrackerProtocol)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClientVersion" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the client's preferred version]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getServerVersion" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the version that the server agreed to.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A version mismatch for the RPC protocol.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.ipc.RPC.VersionMismatch -->
|
|
<!-- start class org.apache.hadoop.ipc.Server -->
|
|
<class name="Server" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Server" type="java.lang.String, int, java.lang.Class<? extends org.apache.hadoop.io.Writable>, int, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<constructor name="Server" type="java.lang.String, int, java.lang.Class<? extends org.apache.hadoop.io.Writable>, int, org.apache.hadoop.conf.Configuration, java.lang.String"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Constructs a server listening on the named port and address. Parameters passed must
|
|
be of the named class. The <code>handlerCount</code> determines
|
|
the number of handler threads that will be used to process calls.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="get" return="org.apache.hadoop.ipc.Server"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the server instance called under or null. May be called under
|
|
{@link #call(Writable, long)} implementations, and under {@link Writable}
|
|
methods of parameters and return values. Permits applications to access
|
|
the server context.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRemoteIp" return="java.net.InetAddress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the remote side's IP address when invoked inside an RPC.
|
|
Returns null in case of an error.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRemoteAddress" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns remote address as a string when invoked inside an RPC.
|
|
Returns null in case of an error.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="bind"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="socket" type="java.net.ServerSocket"/>
|
|
<param name="address" type="java.net.InetSocketAddress"/>
|
|
<param name="backlog" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[A convenience method to bind to a given address and report
|
|
better exceptions if the address is not a valid host.
|
|
@param socket the socket to bind
|
|
@param address the address to bind to
|
|
@param backlog the number of connections allowed in the queue
|
|
@throws BindException if the address can't be bound
|
|
@throws UnknownHostException if the address isn't a valid host name
|
|
@throws IOException other random errors from bind]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSocketSendBufSize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="size" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the socket buffer size used for responding to RPCs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="start"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Starts the service. Must be called before any calls will be handled.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stop"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Stops the service. No new calls will be handled after this is called.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="join"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<doc>
|
|
<![CDATA[Wait for the server to be stopped.
|
|
Does not wait for all subthreads to finish.
|
|
See {@link #stop()}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getListenerAddress" return="java.net.InetSocketAddress"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the socket (ip+port) on which the RPC server is listening.
|
|
@return the socket (ip+port) on which the RPC server is listening.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="call" return="org.apache.hadoop.io.Writable"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="param" type="org.apache.hadoop.io.Writable"/>
|
|
<param name="receiveTime" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called for each call.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumOpenConnections" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of open RPC connections
|
|
@return the number of open rpc connections]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCallQueueLen" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of rpc calls in the queue.
|
|
@return The number of rpc calls in the queue.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="HEADER" type="java.nio.ByteBuffer"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The first four bytes of Hadoop RPC connections]]>
|
|
</doc>
|
|
</field>
|
|
<field name="CURRENT_VERSION" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="rpcMetrics" type="org.apache.hadoop.ipc.metrics.RpcMetrics"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An abstract IPC service. IPC calls take a single {@link Writable} as a
|
|
parameter, and return a {@link Writable} as their value. A service runs on
|
|
a port and is defined by a parameter class and a value class.
|
|
|
|
@see Client]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.ipc.Server -->
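<!-- A hypothetical sketch of extending the abstract Server directly (most code uses
     RPC.Server instead). The parameter class and handler count below are arbitrary
     placeholders.

     public class EchoServer extends org.apache.hadoop.ipc.Server {
       public EchoServer(String bindAddress, int port,
                         org.apache.hadoop.conf.Configuration conf)
           throws java.io.IOException {
         // clients must send ObjectWritable parameters; 2 handler threads
         super(bindAddress, port, org.apache.hadoop.io.ObjectWritable.class, 2, conf);
       }
       @Override
       public org.apache.hadoop.io.Writable call(org.apache.hadoop.io.Writable param,
                                                 long receiveTime)
           throws java.io.IOException {
         return param;                            // echo each call's parameter back
       }
     }
-->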
|
|
<!-- start interface org.apache.hadoop.ipc.VersionedProtocol -->
|
|
<interface name="VersionedProtocol" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getProtocolVersion" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.String"/>
|
|
<param name="clientVersion" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return protocol version corresponding to protocol interface.
|
|
@param protocol The classname of the protocol interface
|
|
@param clientVersion The version of the protocol that the client speaks
|
|
@return the version that the server will speak]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Superclass of all protocols that use Hadoop RPC.
|
|
Subclasses of this interface are also supposed to have
|
|
a static final long versionID field.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.ipc.VersionedProtocol -->
|
|
</package>
|
|
<package name="org.apache.hadoop.ipc.metrics">
|
|
<!-- start class org.apache.hadoop.ipc.metrics.RpcMetrics -->
|
|
<class name="RpcMetrics" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.metrics.Updater"/>
|
|
<constructor name="RpcMetrics" type="java.lang.String, java.lang.String, org.apache.hadoop.ipc.Server"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="doUpdates"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.metrics.MetricsContext"/>
|
|
<doc>
|
|
<![CDATA[Push the metrics to the monitoring subsystem on each doUpdates() call.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="shutdown"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="rpcQueueTime" type="org.apache.hadoop.metrics.util.MetricsTimeVaryingRate"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The metrics variables are public:
|
|
- they can be set directly by calling their set/inc methods
|
|
- they can also be read directly - e.g. JMX does this.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="rpcProcessingTime" type="org.apache.hadoop.metrics.util.MetricsTimeVaryingRate"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="metricsList" type="java.util.Map<java.lang.String, org.apache.hadoop.metrics.util.MetricsTimeVaryingRate>"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This class is for maintaining the various RPC statistics
|
|
and publishing them through the metrics interfaces.
|
|
This also registers the JMX MBean for RPC.
|
|
<p>
|
|
This class has a number of metrics variables that are publicly accessible;
|
|
these variables (objects) have methods to update their values;
|
|
for example:
|
|
<p> {@link #rpcQueueTime}.inc(time)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.ipc.metrics.RpcMetrics -->
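<!-- A hypothetical sketch of updating the public RpcMetrics fields, following the pattern
     named in the class description; the constructor arguments identifying the service
     instance and the two timing variables are placeholders.

     RpcMetrics metrics = new RpcMetrics("example-host", "9000", server);
     // Record how long a call waited in the queue and how long it took to process.
     metrics.rpcQueueTime.inc(queueTimeMillis);
     metrics.rpcProcessingTime.inc(processingTimeMillis);
     metrics.shutdown();
-->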
|
|
<!-- start interface org.apache.hadoop.ipc.metrics.RpcMgtMBean -->
|
|
<interface name="RpcMgtMBean" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getRpcOpsNumber" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Number of RPC Operations in the last interval
|
|
@return number of operations]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRpcOpsAvgProcessingTime" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Average time for RPC Operations in last interval
|
|
@return time in msec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRpcOpsAvgProcessingTimeMin" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The Minimum RPC Operation Processing Time since reset was called
|
|
@return time in msec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRpcOpsAvgProcessingTimeMax" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The Maximum RPC Operation Processing Time since reset was called
|
|
@return time in msec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRpcOpsAvgQueueTime" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The Average RPC Operation Queued Time in the last interval
|
|
@return time in msec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRpcOpsAvgQueueTimeMin" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The Minimum RPC Operation Queued Time since reset was called
|
|
@return time in msec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRpcOpsAvgQueueTimeMax" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The Maximum RPC Operation Queued Time since reset was called
|
|
@return time in msec]]>
|
|
</doc>
|
|
</method>
|
|
<method name="resetAllMinMax"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Reset all min max times]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumOpenConnections" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of open RPC connections
|
|
@return the number of open rpc connections]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCallQueueLen" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of rpc calls in the queue.
|
|
@return The number of rpc calls in the queue.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This is the JMX management interface for the RPC layer.
|
|
Many of the statistics are sampled and averaged on an interval
|
|
which can be specified in the metrics config file.
|
|
<p>
|
|
For the statistics that are sampled and averaged, one must specify
|
|
a metrics context that does periodic update calls. Most do.
|
|
The default Null metrics context however does NOT. So if you aren't
|
|
using any other metrics context then you can turn on the viewing and averaging
|
|
of sampled metrics by specifying the following two lines
|
|
in the hadoop-metrics.properties file:
|
|
<pre>
|
|
rpc.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
|
|
rpc.period=10
|
|
</pre>
|
|
<p>
|
|
Note that the metrics are collected regardless of the context used.
|
|
The context with the update thread is used to average the data periodically.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.ipc.metrics.RpcMgtMBean -->
|
|
</package>
|
|
<package name="org.apache.hadoop.log">
|
|
<!-- start class org.apache.hadoop.log.LogLevel -->
|
|
<class name="LogLevel" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LogLevel"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[A command line implementation]]>
|
|
</doc>
|
|
</method>
|
|
<field name="USAGES" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Change log level at runtime.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.log.LogLevel -->
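<!-- An illustrative usage note; host, port, and logger name are placeholders and the exact
     option syntax is given by this class's USAGES field. The log level of a running daemon
     can typically be changed either through the servlet, e.g.
     http://jobtracker.example.com:50030/logLevel?log=org.apache.hadoop.mapred&level=DEBUG
     or through the command-line entry point, e.g.
     hadoop daemonlog -setlevel jobtracker.example.com:50030 org.apache.hadoop.mapred DEBUG
-->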
|
|
<!-- start class org.apache.hadoop.log.LogLevel.Servlet -->
|
|
<class name="LogLevel.Servlet" extends="javax.servlet.http.HttpServlet"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LogLevel.Servlet"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="doGet"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="request" type="javax.servlet.http.HttpServletRequest"/>
|
|
<param name="response" type="javax.servlet.http.HttpServletResponse"/>
|
|
<exception name="ServletException" type="javax.servlet.ServletException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A servlet implementation]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.log.LogLevel.Servlet -->
|
|
</package>
|
|
<package name="org.apache.hadoop.mapred">
|
|
<!-- start class org.apache.hadoop.mapred.ClusterStatus -->
|
|
<class name="ClusterStatus" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<method name="getTaskTrackers" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the number of task trackers in the cluster.
|
|
|
|
@return the number of task trackers in the cluster.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapTasks" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the number of currently running map tasks in the cluster.
|
|
|
|
@return the number of currently running map tasks in the cluster.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReduceTasks" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the number of currently running reduce tasks in the cluster.
|
|
|
|
@return the number of currently running reduce tasks in the cluster.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxMapTasks" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the maximum capacity for running map tasks in the cluster.
|
|
|
|
@return the maximum capacity for running map tasks in the cluster.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxReduceTasks" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the maximum capacity for running reduce tasks in the cluster.
|
|
|
|
@return the maximum capacity for running reduce tasks in the cluster.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobTrackerState" return="org.apache.hadoop.mapred.JobTracker.State"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the current state of the <code>JobTracker</code>,
|
|
as {@link JobTracker.State}
|
|
|
|
@return the current state of the <code>JobTracker</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Status information on the current state of the Map-Reduce cluster.
|
|
|
|
<p><code>ClusterStatus</code> provides clients with information such as:
|
|
<ol>
|
|
<li>
|
|
Size of the cluster.
|
|
</li>
|
|
<li>
|
|
Task capacity of the cluster.
|
|
</li>
|
|
<li>
|
|
The number of currently running map & reduce tasks.
|
|
</li>
|
|
<li>
|
|
State of the <code>JobTracker</code>.
|
|
</li>
|
|
</ol></p>
|
|
|
|
<p>Clients can query for the latest <code>ClusterStatus</code>, via
|
|
{@link JobClient#getClusterStatus()}.</p>
|
|
|
|
@see JobClient]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.ClusterStatus -->
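<!-- A hypothetical sketch of querying the cluster status through JobClient, as the class
     description suggests; conf is a placeholder Configuration.

     org.apache.hadoop.mapred.JobConf job = new org.apache.hadoop.mapred.JobConf(conf);
     JobClient jobClient = new JobClient(job);
     ClusterStatus status = jobClient.getClusterStatus();
     System.out.println("task trackers : " + status.getTaskTrackers());
     System.out.println("map slots     : " + status.getMaxMapTasks());
     System.out.println("running maps  : " + status.getMapTasks());
-->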
|
|
<!-- start class org.apache.hadoop.mapred.Counters -->
|
|
<class name="Counters" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="java.lang.Iterable<org.apache.hadoop.mapred.Counters.Group>"/>
|
|
<constructor name="Counters"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getGroupNames" return="java.util.Collection<java.lang.String>"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the names of all counter classes.
|
|
@return Set of counter names.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="iterator" return="java.util.Iterator<org.apache.hadoop.mapred.Counters.Group>"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getGroup" return="org.apache.hadoop.mapred.Counters.Group"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="groupName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns the named counter group, or an empty group if there is none
|
|
with the specified name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="findCounter" return="org.apache.hadoop.mapred.Counters.Counter"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Enum"/>
|
|
<doc>
|
|
<![CDATA[Find the counter for the given enum. The same enum will always return the
|
|
same counter.
|
|
@param key the counter key
|
|
@return the matching counter object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="findCounter" return="org.apache.hadoop.mapred.Counters.Counter"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="group" type="java.lang.String"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Find a counter given the group and the name.
|
|
@param group the name of the group
|
|
@param name the internal name of the counter
|
|
@return the counter for that name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="findCounter" return="org.apache.hadoop.mapred.Counters.Counter"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="deprecated, no comment">
|
|
<param name="group" type="java.lang.String"/>
|
|
<param name="id" type="int"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Find a counter by using strings
|
|
@param group the name of the group
|
|
@param id the id of the counter within the group (0 to N-1)
|
|
@param name the internal name of the counter
|
|
@return the counter for that name
|
|
@deprecated]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrCounter"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Enum"/>
|
|
<param name="amount" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increments the specified counter by the specified amount, creating it if
|
|
it didn't already exist.
|
|
@param key identifies a counter
|
|
@param amount amount by which counter is to be incremented]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrCounter"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="group" type="java.lang.String"/>
|
|
<param name="counter" type="java.lang.String"/>
|
|
<param name="amount" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increments the specified counter by the specified amount, creating it if
|
|
it didn't already exist.
|
|
@param group the name of the group
|
|
@param counter the internal name of the counter
|
|
@param amount amount by which counter is to be incremented]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounter" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Enum"/>
|
|
<doc>
|
|
<![CDATA[Returns current value of the specified counter, or 0 if the counter
|
|
does not exist.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrAllCounters"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.mapred.Counters"/>
|
|
<doc>
|
|
<![CDATA[Increments multiple counters by their amounts in another Counters
|
|
instance.
|
|
@param other the other Counters instance]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sum" return="org.apache.hadoop.mapred.Counters"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="a" type="org.apache.hadoop.mapred.Counters"/>
|
|
<param name="b" type="org.apache.hadoop.mapred.Counters"/>
|
|
<doc>
|
|
<![CDATA[Convenience method for computing the sum of two sets of counters.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="size" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the total number of counters, by summing the number of counters
|
|
in each group.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write the set of groups.
|
|
The external format is:
|
|
#groups (groupName group)*
|
|
|
|
i.e. the number of groups followed by 0 or more groups, where each
|
|
group is of the form:
|
|
|
|
groupDisplayName #counters (false | true counter)*
|
|
|
|
where each counter is of the form:
|
|
|
|
name (false | true displayName) value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a set of groups.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="log"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="log" type="org.apache.commons.logging.Log"/>
|
|
<doc>
|
|
<![CDATA[Logs the current counter values.
|
|
@param log The log to use.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return textual representation of the counter values.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeCompactString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Convert a counters object into a single line that is easy to parse.
|
|
@return the string with "name=value" for each counter, separated by ","]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeEscapedCompactString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Represent the counter in a textual format that can be converted back to
|
|
its object form
|
|
@return the string in the following format
|
|
{(groupname)(group-displayname)[(countername)(displayname)(value)][][]}{}{}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fromEscapedCompactString" return="org.apache.hadoop.mapred.Counters"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="compactString" type="java.lang.String"/>
|
|
<exception name="ParseException" type="java.text.ParseException"/>
|
|
<doc>
|
|
<![CDATA[Convert a stringified counter representation into a counter object. Note
|
|
that the counter can be recovered only if it was stringified using
|
|
{@link #makeEscapedCompactString()}.
|
|
@return the recovered Counters object]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A set of named counters.
|
|
|
|
<p><code>Counters</code> represent global counters, defined either by the
|
|
Map-Reduce framework or applications. Each <code>Counter</code> can be of
|
|
any {@link Enum} type.</p>
|
|
|
|
<p><code>Counters</code> are bunched into {@link Group}s, each comprising
|
|
counters from a particular <code>Enum</code> class.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.Counters -->
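<!-- A hypothetical sketch of how Counters are typically produced and consumed; MyCounters,
     reporter, and runningJob are placeholders for an application enum, the task's Reporter,
     and the RunningJob handle returned when the job was submitted.

     // Application-defined counter enum; each enum class becomes a Counters.Group.
     enum MyCounters { RECORDS_SEEN, RECORDS_SKIPPED }

     // Inside a map or reduce task, counters are usually bumped through the Reporter:
     reporter.incrCounter(MyCounters.RECORDS_SEEN, 1);

     // After the job completes, the aggregated values can be read back:
     Counters counters = runningJob.getCounters();
     long seen = counters.getCounter(MyCounters.RECORDS_SEEN);
     Counters.Counter skipped = counters.findCounter(
         MyCounters.class.getName(), "RECORDS_SKIPPED");
-->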
|
|
<!-- start class org.apache.hadoop.mapred.Counters.Counter -->
|
|
<class name="Counters.Counter" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the binary representation of the counter]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write the binary representation of the counter]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the internal name of the counter.
|
|
@return the internal name of the counter]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDisplayName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the name of the counter.
|
|
@return the user facing name of the counter]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDisplayName"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="displayName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the display name of the counter.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeEscapedCompactString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the compact stringified version of the counter in the format
|
|
[(actual-name)(display-name)(value)]]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounter" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[What is the current value of this counter?
|
|
@return the current value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="increment"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="incr" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increment this counter by the given value
|
|
@param incr the value to increase this counter by]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A counter record, comprising its name and value.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.Counters.Counter -->
|
|
<!-- start class org.apache.hadoop.mapred.Counters.Group -->
|
|
<class name="Counters.Group" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="java.lang.Iterable<org.apache.hadoop.mapred.Counters.Counter>"/>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns raw name of the group. This is the name of the enum class
|
|
for this group of counters.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDisplayName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns localized name of the group. This is the same as getName() by
|
|
default, but different if an appropriate ResourceBundle is found.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDisplayName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="displayName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the display name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="makeEscapedCompactString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the compact stringified version of the group in the format
|
|
{(actual-name)(display-name)(value)[][][]} where [] are compact strings for the
|
|
counters within.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounter" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="counterName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns the value of the specified counter, or 0 if the counter does
|
|
not exist.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounter" return="org.apache.hadoop.mapred.Counters.Counter"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use {@link #getCounter(String)} instead">
|
|
<param name="id" type="int"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the counter for the given id and create it if it doesn't exist.
|
|
@param id the numeric id of the counter within the group
|
|
@param name the internal counter name
|
|
@return the counter
|
|
@deprecated use {@link #getCounter(String)} instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounterForName" return="org.apache.hadoop.mapred.Counters.Counter"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the counter for the given name and create it if it doesn't exist.
|
|
@param name the internal counter name
|
|
@return the counter]]>
|
|
</doc>
|
|
</method>
|
|
<method name="size" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the number of counters in this group.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="iterator" return="java.util.Iterator<org.apache.hadoop.mapred.Counters.Counter>"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>Group</code> of counters, comprising counters from a particular
|
|
counter {@link Enum} class.
|
|
|
|
<p><code>Group</code> handles localization of the class name and the
|
|
counter names.</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.Counters.Group -->
|
|
<!-- start class org.apache.hadoop.mapred.DefaultJobHistoryParser -->
|
|
<class name="DefaultJobHistoryParser" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DefaultJobHistoryParser"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="parseJobTasks"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobHistoryFile" type="java.lang.String"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobHistory.JobInfo"/>
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Populates a JobInfo object from the job's history log file.
|
|
@param jobHistoryFile history file for this job.
|
|
@param job a precreated JobInfo object, should be non-null.
|
|
@param fs FileSystem where historyFile is present.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Default parser for job history files. It creates an object model from
|
|
the job history file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.DefaultJobHistoryParser -->
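<!-- A hypothetical sketch of parseJobTasks(); the history file path and job id are
     placeholders, and the JobHistory.JobInfo constructor shown is an assumption to be
     checked against the JobHistory documentation.

     org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(conf);
     JobHistory.JobInfo job = new JobHistory.JobInfo("job_200902200010_0001");
     DefaultJobHistoryParser.parseJobTasks("/history/job_200902200010_0001_user_wordcount",
                                           job, fs);
-->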
|
|
<!-- start class org.apache.hadoop.mapred.FileAlreadyExistsException -->
|
|
<class name="FileAlreadyExistsException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FileAlreadyExistsException"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="FileAlreadyExistsException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Used when the target file already exists for any operation and
|
|
is not configured to be overwritten.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.FileAlreadyExistsException -->
|
|
<!-- start class org.apache.hadoop.mapred.FileInputFormat -->
|
|
<class name="FileInputFormat" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputFormat<K, V>"/>
|
|
<constructor name="FileInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setMinSplitSize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="minSplitSize" type="long"/>
|
|
</method>
|
|
<method name="isSplitable" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="filename" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Is the given filename splitable? Usually true, but if the file is
|
|
stream compressed, it will not be.
|
|
|
|
<code>FileInputFormat</code> implementations can override this and return
|
|
<code>false</code> to ensure that individual input files are never split-up
|
|
so that {@link Mapper}s process entire files.
|
|
|
|
@param fs the file system that the file is on
|
|
@param filename the file name to check
|
|
@return is this file splitable?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<K, V>"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setInputPathFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="filter" type="java.lang.Class<? extends org.apache.hadoop.fs.PathFilter>"/>
|
|
<doc>
|
|
<![CDATA[Set a PathFilter to be applied to the input paths for the map-reduce job.
|
|
|
|
 @param filter the PathFilter class to use for filtering the input paths.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInputPathFilter" return="org.apache.hadoop.fs.PathFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get a PathFilter instance of the filter set for the input paths.
|
|
|
|
@return the PathFilter instance set for the job, NULL if none has been set.]]>
|
|
</doc>
|
|
</method>
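      <!-- Illustrative sketch (hypothetical SkipUnderscoreFilter): a PathFilter can be
           registered with setInputPathFilter so that only accepted paths are listed
           as job input.

           import org.apache.hadoop.fs.Path;
           import org.apache.hadoop.fs.PathFilter;
           import org.apache.hadoop.mapred.FileInputFormat;
           import org.apache.hadoop.mapred.JobConf;

           public class SkipUnderscoreFilter implements PathFilter {
             public boolean accept(Path path) {
               // keep every path except those whose final component starts with '_'
               return !path.getName().startsWith("_");
             }
           }

           // when configuring the job:
           //   JobConf conf = new JobConf();
           //   FileInputFormat.setInputPathFilter(conf, SkipUnderscoreFilter.class);
      -->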
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[List input directories.
|
|
Subclasses may override to, e.g., select only files matching a regular
|
|
expression.
|
|
|
|
@param job the job to list input paths for
|
|
@return array of FileStatus objects
|
|
 @throws IOException if zero items are found.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSplits" return="org.apache.hadoop.mapred.InputSplit[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="numSplits" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Splits files returned by {@link #listStatus(JobConf)} when
|
|
they're too big.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="computeSplitSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="goalSize" type="long"/>
|
|
<param name="minSize" type="long"/>
|
|
<param name="blockSize" type="long"/>
|
|
</method>
|
|
<method name="getBlockIndex" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="blkLocations" type="org.apache.hadoop.fs.BlockLocation[]"/>
|
|
<param name="offset" type="long"/>
|
|
</method>
|
|
<method name="setInputPaths"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="commaSeparatedPaths" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets the given comma separated paths as the list of inputs
|
|
for the map-reduce job.
|
|
|
|
@param conf Configuration of the job
|
|
@param commaSeparatedPaths Comma separated paths to be set as
|
|
the list of inputs for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addInputPaths"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="commaSeparatedPaths" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Add the given comma separated paths to the list of inputs for
|
|
the map-reduce job.
|
|
|
|
@param conf The configuration of the job
|
|
@param commaSeparatedPaths Comma separated paths to be added to
|
|
the list of inputs for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInputPaths"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="inputPaths" type="org.apache.hadoop.fs.Path[]"/>
|
|
<doc>
|
|
<![CDATA[Set the array of {@link Path}s as the list of inputs
|
|
for the map-reduce job.
|
|
|
|
@param conf Configuration of the job.
|
|
@param inputPaths the {@link Path}s of the input directories/files
|
|
for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addInputPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Add a {@link Path} to the list of inputs for the map-reduce job.
|
|
|
|
@param conf The configuration of the job
|
|
@param path {@link Path} to be added to the list of inputs for
|
|
the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInputPaths" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the list of input {@link Path}s for the map-reduce job.
|
|
|
|
@param conf The configuration of the job
|
|
@return the list of input {@link Path}s for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
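      <!-- Illustrative sketch of wiring input paths on a JobConf; "in1" through "in4"
           are placeholder paths.

           import org.apache.hadoop.fs.Path;
           import org.apache.hadoop.mapred.FileInputFormat;
           import org.apache.hadoop.mapred.JobConf;

           public class InputPathSetup {
             public static void configure(JobConf conf) {
               // replace any previously configured inputs with a single path
               FileInputFormat.setInputPaths(conf, new Path("in1"));
               // append further inputs, one at a time or as a comma-separated list
               FileInputFormat.addInputPath(conf, new Path("in2"));
               FileInputFormat.addInputPaths(conf, "in3,in4");
               Path[] inputs = FileInputFormat.getInputPaths(conf);  // in1, in2, in3, in4
             }
           }
      -->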
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A base class for file-based {@link InputFormat}.
|
|
|
|
<p><code>FileInputFormat</code> is the base class for all file-based
|
|
<code>InputFormat</code>s. This provides a generic implementation of
|
|
{@link #getSplits(JobConf, int)}.
|
|
Subclasses of <code>FileInputFormat</code> can also override the
|
|
{@link #isSplitable(FileSystem, Path)} method to ensure input-files are
|
|
not split-up and are processed as a whole by {@link Mapper}s.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.FileInputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.FileOutputCommitter -->
|
|
<class name="FileOutputCommitter" extends="org.apache.hadoop.mapred.OutputCommitter"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FileOutputCommitter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setupJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.mapred.JobContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="cleanupJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.mapred.JobContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setupTask"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="commitTask"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="abortTask"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
</method>
|
|
<method name="needsTaskCommit" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="TEMP_DIR_NAME" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Temporary directory name]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An {@link OutputCommitter} that commits files specified
|
|
 in the job output directory, i.e. ${mapred.output.dir}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.FileOutputCommitter -->
|
|
<!-- start class org.apache.hadoop.mapred.FileOutputFormat -->
|
|
<class name="FileOutputFormat" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.OutputFormat<K, V>"/>
|
|
<constructor name="FileOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setCompressOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="compress" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether the output of the job is compressed.
|
|
@param conf the {@link JobConf} to modify
|
|
@param compress should the output of the job be compressed?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCompressOutput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Is the job output compressed?
|
|
@param conf the {@link JobConf} to look in
|
|
@return <code>true</code> if the job output should be compressed,
|
|
<code>false</code> otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputCompressorClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="codecClass" type="java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link CompressionCodec} to be used to compress job outputs.
|
|
@param conf the {@link JobConf} to modify
|
|
@param codecClass the {@link CompressionCodec} to be used to
|
|
compress the job outputs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputCompressorClass" return="java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="defaultValue" type="java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec>"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link CompressionCodec} for compressing the job outputs.
|
|
@param conf the {@link JobConf} to look in
|
|
@param defaultValue the {@link CompressionCodec} to return if not set
|
|
@return the {@link CompressionCodec} to be used to compress the
|
|
job outputs
|
|
@throws IllegalArgumentException if the class was specified, but not found]]>
|
|
</doc>
|
|
</method>
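      <!-- Illustrative sketch: enabling compressed job output and selecting a codec
           (GzipCodec is used here purely as an example).

           import org.apache.hadoop.io.compress.GzipCodec;
           import org.apache.hadoop.mapred.FileOutputFormat;
           import org.apache.hadoop.mapred.JobConf;

           public class OutputCompressionSetup {
             public static void configure(JobConf conf) {
               FileOutputFormat.setCompressOutput(conf, true);                   // compress job output
               FileOutputFormat.setOutputCompressorClass(conf, GzipCodec.class); // choose the codec
               boolean compressed = FileOutputFormat.getCompressOutput(conf);    // now true
             }
           }
      -->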
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="checkOutputSpecs"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="FileAlreadyExistsException" type="org.apache.hadoop.mapred.FileAlreadyExistsException"/>
|
|
<exception name="InvalidJobConfException" type="org.apache.hadoop.mapred.InvalidJobConfException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setOutputPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="outputDir" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link Path} of the output directory for the map-reduce job.
|
|
|
|
@param conf The configuration of the job.
|
|
@param outputDir the {@link Path} of the output directory for
|
|
the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link Path} to the output directory for the map-reduce job.
|
|
|
|
@return the {@link Path} to the output directory for the map-reduce job.
|
|
@see FileOutputFormat#getWorkOutputPath(JobConf)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getWorkOutputPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link Path} to the task's temporary output directory
|
|
for the map-reduce job
|
|
|
|
<h4 id="SideEffectFiles">Tasks' Side-Effect Files</h4>
|
|
|
|
<p><i>Note:</i> The following is valid only if the {@link OutputCommitter}
|
|
is {@link FileOutputCommitter}. If <code>OutputCommitter</code> is not
|
|
a <code>FileOutputCommitter</code>, the task's temporary output
|
|
 directory is the same as {@link #getOutputPath(JobConf)}, i.e.
|
|
 <tt>${mapred.output.dir}</tt>.</p>
|
|
|
|
<p>Some applications need to create/write-to side-files, which differ from
|
|
the actual job-outputs.
|
|
|
|
<p>In such cases there could be issues with 2 instances of the same TIP
|
|
(running simultaneously e.g. speculative tasks) trying to open/write-to the
|
|
same file (path) on HDFS. Hence the application-writer will have to pick
|
|
unique names per task-attempt (e.g. using the attemptid, say
|
|
<tt>attempt_200709221812_0001_m_000000_0</tt>), not just per TIP.</p>
|
|
|
|
<p>To get around this the Map-Reduce framework helps the application-writer
|
|
out by maintaining a special
|
|
<tt>${mapred.output.dir}/_temporary/_${taskid}</tt>
|
|
sub-directory for each task-attempt on HDFS where the output of the
|
|
task-attempt goes. On successful completion of the task-attempt the files
|
|
in the <tt>${mapred.output.dir}/_temporary/_${taskid}</tt> (only)
|
|
are <i>promoted</i> to <tt>${mapred.output.dir}</tt>. Of course, the
|
|
framework discards the sub-directory of unsuccessful task-attempts. This
|
|
is completely transparent to the application.</p>
|
|
|
|
<p>The application-writer can take advantage of this by creating any
|
|
side-files required in <tt>${mapred.work.output.dir}</tt> during execution
|
|
 of the reduce task, i.e. via {@link #getWorkOutputPath(JobConf)}, and the
|
|
 framework will move them out similarly, so the application does not have to pick
|
|
unique paths per task-attempt.</p>
|
|
|
|
<p><i>Note</i>: the value of <tt>${mapred.work.output.dir}</tt> during
|
|
execution of a particular task-attempt is actually
|
|
 <tt>${mapred.output.dir}/_temporary/_${taskid}</tt>, and this value is
|
|
set by the map-reduce framework. So, just create any side-files in the
|
|
path returned by {@link #getWorkOutputPath(JobConf)} from map/reduce
|
|
task to take advantage of this feature.</p>
|
|
|
|
<p>The entire discussion holds true for maps of jobs with
|
|
reducer=NONE (i.e. 0 reduces) since output of the map, in that case,
|
|
goes directly to HDFS.</p>
|
|
|
|
@return the {@link Path} to the task's temporary output directory
|
|
for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
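      <!-- Illustrative sketch (hypothetical helper, assumed to run inside a task):
           a side-effect file written under getWorkOutputPath is promoted to
           ${mapred.output.dir} only if the task-attempt commits successfully.

           import java.io.IOException;
           import org.apache.hadoop.fs.FSDataOutputStream;
           import org.apache.hadoop.fs.Path;
           import org.apache.hadoop.mapred.FileOutputFormat;
           import org.apache.hadoop.mapred.JobConf;

           public class SideFileWriter {
             public static void writeSideFile(JobConf job, String name) throws IOException {
               Path workDir = FileOutputFormat.getWorkOutputPath(job);
               // getUniqueName keeps files of different task-attempts from colliding
               Path sideFile = new Path(workDir, FileOutputFormat.getUniqueName(job, name));
               FSDataOutputStream out = sideFile.getFileSystem(job).create(sideFile);
               try {
                 out.writeUTF("side-effect data");   // placeholder payload
               } finally {
                 out.close();
               }
             }
           }
      -->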
|
|
<method name="getTaskOutputPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Helper function to create the task's temporary output directory and
|
|
return the path to the task's output file.
|
|
|
|
@param conf job-configuration
|
|
@param name temporary task-output filename
|
|
@return path to the task's temporary output file
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUniqueName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Helper function to generate a name that is unique for the task.
|
|
|
|
<p>The generated name can be used to create custom files from within the
|
|
different tasks for the job, the names for different tasks will not collide
|
|
with each other.</p>
|
|
|
|
<p>The given name is postfixed with the task type, 'm' for maps, 'r' for
|
|
 reduces, and the task partition number. For example, given the name 'test'
|
|
 running on the first map of the job, the generated name will be
|
|
'test-m-00000'.</p>
|
|
|
|
@param conf the configuration for the job.
|
|
@param name the name to make unique.
|
|
 @return a unique name across all tasks of the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPathForCustomFile" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Helper function to generate a {@link Path} for a file that is unique for
|
|
the task within the job output directory.
|
|
|
|
<p>The path can be used to create custom files from within the map and
|
|
reduce tasks. The path name will be unique for each task. The path parent
|
|
 will be the job output directory.</p>
|
|
|
|
<p>This method uses the {@link #getUniqueName} method to make the file name
|
|
unique for the task.</p>
|
|
|
|
@param conf the configuration for the job.
|
|
@param name the name for the file.
|
|
 @return a unique path across all tasks of the job.]]>
|
|
</doc>
|
|
</method>
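      <!-- Illustrative sketch (hypothetical base name "stats"): getPathForCustomFile
           combines the job output directory with getUniqueName, yielding a name such
           as "stats-m-00000" for the first map task.

           import java.io.IOException;
           import org.apache.hadoop.fs.FSDataOutputStream;
           import org.apache.hadoop.fs.Path;
           import org.apache.hadoop.mapred.FileOutputFormat;
           import org.apache.hadoop.mapred.JobConf;

           public class CustomFileExample {
             public static void writeStats(JobConf job) throws IOException {
               Path p = FileOutputFormat.getPathForCustomFile(job, "stats");
               FSDataOutputStream out = p.getFileSystem(job).create(p);
               out.writeBytes("records=42\n");   // placeholder content
               out.close();
             }
           }
      -->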
|
|
<doc>
|
|
<![CDATA[A base class for {@link OutputFormat}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.FileOutputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.FileSplit -->
|
|
<class name="FileSplit" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputSplit"/>
|
|
<constructor name="FileSplit" type="org.apache.hadoop.fs.Path, long, long, org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="deprecated, no comment">
|
|
<doc>
|
|
<![CDATA[Constructs a split.
|
|
@deprecated
|
|
@param file the file name
|
|
@param start the position of the first byte in the file to process
|
|
@param length the number of bytes in the file to process]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="FileSplit" type="org.apache.hadoop.fs.Path, long, long, java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a split with host information
|
|
|
|
@param file the file name
|
|
@param start the position of the first byte in the file to process
|
|
@param length the number of bytes in the file to process
|
|
@param hosts the list of hosts containing the block, possibly null]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The file containing this split's data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStart" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The position of the first byte in the file to process.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of bytes in the file to process.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getLocations" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A section of an input file. Returned by {@link
|
|
InputFormat#getSplits(JobConf, int)} and passed to
|
|
{@link InputFormat#getRecordReader(InputSplit,JobConf,Reporter)}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.FileSplit -->
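  <!-- Illustrative sketch (placeholder path and hostnames): a custom
       InputFormat#getSplits implementation would typically build FileSplits
       like this, one per region of each input file.

       import org.apache.hadoop.fs.Path;
       import org.apache.hadoop.mapred.FileSplit;

       public class FileSplitExample {
         public static FileSplit firstBlock() {
           Path file = new Path("/data/input/part-00000");  // placeholder file
           String[] hosts = { "host1", "host2" };           // placeholder block locations
           return new FileSplit(file, 0L, 64L * 1024 * 1024, hosts);  // first 64 MB
         }
       }
  -->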
|
|
<!-- start class org.apache.hadoop.mapred.ID -->
|
|
<class name="ID" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable<org.apache.hadoop.mapred.ID>"/>
|
|
<constructor name="ID" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
 <![CDATA[Constructs an ID object from the given int.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ID"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getId" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
 <![CDATA[Returns the int which represents the identifier.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="that" type="org.apache.hadoop.mapred.ID"/>
|
|
<doc>
|
|
<![CDATA[Compare IDs by associated numbers]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="org.apache.hadoop.mapred.ID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="forName" return="org.apache.hadoop.mapred.ID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<exception name="IllegalArgumentException" type="java.lang.IllegalArgumentException"/>
|
|
<doc>
|
|
 <![CDATA[Construct an ID object from the given string
|
|
|
|
 @return the constructed ID object, or null if the given String is null
|
|
@throws IllegalArgumentException if the given string is malformed]]>
|
|
</doc>
|
|
</method>
|
|
<field name="id" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A general identifier, which internally stores the id
|
|
as an integer. This is the super class of {@link JobID},
|
|
{@link TaskID} and {@link TaskAttemptID}.
|
|
|
|
@see JobID
|
|
@see TaskID
|
|
@see TaskAttemptID]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.ID -->
|
|
<!-- start interface org.apache.hadoop.mapred.InputFormat -->
|
|
<interface name="InputFormat" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getSplits" return="org.apache.hadoop.mapred.InputSplit[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="numSplits" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Logically split the set of input files for the job.
|
|
|
|
<p>Each {@link InputSplit} is then assigned to an individual {@link Mapper}
|
|
for processing.</p>
|
|
|
|
<p><i>Note</i>: The split is a <i>logical</i> split of the inputs and the
|
|
 input files are not physically split into chunks. For example, a split could
|
|
be <i><input-file-path, start, offset></i> tuple.
|
|
|
|
@param job job configuration.
|
|
@param numSplits the desired number of splits, a hint.
|
|
@return an array of {@link InputSplit}s for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link RecordReader} for the given {@link InputSplit}.
|
|
|
|
<p>It is the responsibility of the <code>RecordReader</code> to respect
|
|
record boundaries while processing the logical split to present a
|
|
record-oriented view to the individual task.</p>
|
|
|
|
@param split the {@link InputSplit}
|
|
@param job the job that this split belongs to
|
|
@return a {@link RecordReader}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>InputFormat</code> describes the input-specification for a
|
|
Map-Reduce job.
|
|
|
|
<p>The Map-Reduce framework relies on the <code>InputFormat</code> of the
|
|
 job to:</p>
|
|
<ol>
|
|
<li>
|
|
Validate the input-specification of the job.
|
|
<li>
|
|
Split-up the input file(s) into logical {@link InputSplit}s, each of
|
|
which is then assigned to an individual {@link Mapper}.
|
|
</li>
|
|
<li>
|
|
Provide the {@link RecordReader} implementation to be used to glean
|
|
input records from the logical <code>InputSplit</code> for processing by
|
|
the {@link Mapper}.
|
|
</li>
|
|
</ol>
|
|
|
|
<p>The default behavior of file-based {@link InputFormat}s, typically
|
|
sub-classes of {@link FileInputFormat}, is to split the
|
|
input into <i>logical</i> {@link InputSplit}s based on the total size, in
|
|
bytes, of the input files. However, the {@link FileSystem} blocksize of
|
|
the input files is treated as an upper bound for input splits. A lower bound
|
|
on the split size can be set via
|
|
<a href="{@docRoot}/../hadoop-default.html#mapred.min.split.size">
|
|
mapred.min.split.size</a>.</p>
|
|
|
|
<p>Clearly, logical splits based on input-size is insufficient for many
|
|
 applications, since record boundaries are to be respected. In such cases, the
|
|
 application also has to implement a {@link RecordReader}, on which lies the
|
|
 responsibility to respect record-boundaries and present a record-oriented
|
|
view of the logical <code>InputSplit</code> to the individual task.
|
|
|
|
@see InputSplit
|
|
@see RecordReader
|
|
@see JobClient
|
|
@see FileInputFormat]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.InputFormat -->
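  <!-- Illustrative sketch: tuning the split lower bound and the split-count hint
       described above (the 128 MB value and 10 maps are just examples).

       import org.apache.hadoop.mapred.JobConf;

       public class SplitSizeSetup {
         public static void configure(JobConf conf) {
           // raise the lower bound on split size; the file block size remains
           // the upper bound for file-based InputFormats
           conf.setLong("mapred.min.split.size", 128L * 1024 * 1024);
           // the numSplits value passed to getSplits() is only a hint
           conf.setNumMapTasks(10);
         }
       }
  -->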
|
|
<!-- start interface org.apache.hadoop.mapred.InputSplit -->
|
|
<interface name="InputSplit" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the total number of bytes in the data of the <code>InputSplit</code>.
|
|
|
|
@return the number of bytes in the input split.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocations" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the list of hostnames where the input split is located.
|
|
|
|
@return list of hostnames where data of the <code>InputSplit</code> is
|
|
located as an array of <code>String</code>s.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>InputSplit</code> represents the data to be processed by an
|
|
individual {@link Mapper}.
|
|
|
|
<p>Typically, it presents a byte-oriented view on the input and is the
|
|
responsibility of {@link RecordReader} of the job to process this and present
|
|
a record-oriented view.
|
|
|
|
@see InputFormat
|
|
@see RecordReader]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.InputSplit -->
|
|
<!-- start class org.apache.hadoop.mapred.InvalidFileTypeException -->
|
|
<class name="InvalidFileTypeException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="InvalidFileTypeException"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="InvalidFileTypeException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
 <![CDATA[Used when the file type differs from the desired file type, e.g.
|
|
 getting a file when a directory is expected, or some other wrong file type.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.InvalidFileTypeException -->
|
|
<!-- start class org.apache.hadoop.mapred.InvalidInputException -->
|
|
<class name="InvalidInputException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="InvalidInputException" type="java.util.List<java.io.IOException>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create the exception with the given list.
|
|
 @param probs the list of problems to report. This list is not copied.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getProblems" return="java.util.List<java.io.IOException>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the complete list of the problems reported.
|
|
@return the list of problems, which must not be modified]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMessage" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get a summary message of the problems found.
|
|
@return the concatenated messages from all of the problems.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class wraps a list of problems with the input, so that the user
|
|
can get a list of problems together instead of finding and fixing them one
|
|
by one.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.InvalidInputException -->
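  <!-- Illustrative sketch: reporting every collected problem before giving up
       (the surrounding runAndReport helper is hypothetical).

       import java.io.IOException;
       import org.apache.hadoop.mapred.InvalidInputException;
       import org.apache.hadoop.mapred.JobClient;
       import org.apache.hadoop.mapred.JobConf;

       public class InputProblemReport {
         public static void runAndReport(JobConf job) throws IOException {
           try {
             JobClient.runJob(job);
           } catch (InvalidInputException e) {
             // one IOException per bad input path, gathered in a single exception
             for (IOException problem : e.getProblems()) {
               System.err.println(problem.getMessage());
             }
             throw e;
           }
         }
       }
  -->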
|
|
<!-- start class org.apache.hadoop.mapred.InvalidJobConfException -->
|
|
<class name="InvalidJobConfException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="InvalidJobConfException"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="InvalidJobConfException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
 <![CDATA[This exception is thrown when the jobconf is missing some mandatory attributes
|
|
 or the value of some attributes is invalid.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.InvalidJobConfException -->
|
|
<!-- start class org.apache.hadoop.mapred.IsolationRunner -->
|
|
<class name="IsolationRunner" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="IsolationRunner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Run a single task
|
|
@param args the first argument is the task directory]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.IsolationRunner -->
|
|
<!-- start class org.apache.hadoop.mapred.JobClient -->
|
|
<class name="JobClient" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.MRConstants"/>
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="JobClient"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a job client.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobClient" type="org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Build a job client with the given {@link JobConf}, and connect to the
|
|
default {@link JobTracker}.
|
|
|
|
@param conf the job configuration.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobClient" type="java.net.InetSocketAddress, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Build a job client, connect to the indicated job tracker.
|
|
|
|
@param jobTrackAddr the job tracker to connect to.
|
|
@param conf configuration.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getCommandLineConfig" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
 <![CDATA[Return the command line configuration.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="init"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Connect to the default {@link JobTracker}.
|
|
@param conf the job configuration.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close the <code>JobClient</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFs" return="org.apache.hadoop.fs.FileSystem"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get a filesystem handle. We need this to prepare jobs
|
|
for submission to the MapReduce system.
|
|
|
|
@return the filesystem handle.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="submitJob" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobFile" type="java.lang.String"/>
|
|
<exception name="FileNotFoundException" type="java.io.FileNotFoundException"/>
|
|
<exception name="InvalidJobConfException" type="org.apache.hadoop.mapred.InvalidJobConfException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Submit a job to the MR system.
|
|
|
|
This returns a handle to the {@link RunningJob} which can be used to track
|
|
the running-job.
|
|
|
|
@param jobFile the job configuration.
|
|
@return a handle to the {@link RunningJob} which can be used to track the
|
|
running-job.
|
|
@throws FileNotFoundException
|
|
@throws InvalidJobConfException
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="submitJob" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="FileNotFoundException" type="java.io.FileNotFoundException"/>
|
|
<exception name="InvalidJobConfException" type="org.apache.hadoop.mapred.InvalidJobConfException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Submit a job to the MR system.
|
|
This returns a handle to the {@link RunningJob} which can be used to track
|
|
the running-job.
|
|
|
|
@param job the job configuration.
|
|
@return a handle to the {@link RunningJob} which can be used to track the
|
|
running-job.
|
|
@throws FileNotFoundException
|
|
@throws InvalidJobConfException
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isJobDirValid" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobDirPath" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Checks if the job directory is clean and has all the required components
|
|
 for (re)starting the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJob" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
 <![CDATA[Get a {@link RunningJob} object to track an ongoing job. Returns
|
|
null if the id does not correspond to any known job.
|
|
|
|
@param jobid the jobid of the job.
|
|
@return the {@link RunningJob} handle to track the job, null if the
|
|
<code>jobid</code> doesn't correspond to any known job.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJob" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Applications should rather use {@link #getJob(JobID)}.">
|
|
<param name="jobid" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Applications should rather use {@link #getJob(JobID)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the information of the current state of the map tasks of a job.
|
|
|
|
@param jobId the job to query.
|
|
@return the list of all of the map tips.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Applications should rather use {@link #getMapTaskReports(JobID)}">
|
|
<param name="jobId" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Applications should rather use {@link #getMapTaskReports(JobID)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReduceTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the information of the current state of the reduce tasks of a job.
|
|
|
|
@param jobId the job to query.
|
|
@return the list of all of the reduce tips.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCleanupTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the information of the current state of the cleanup tasks of a job.
|
|
|
|
@param jobId the job to query.
|
|
@return the list of all of the cleanup tips.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSetupTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the information of the current state of the setup tasks of a job.
|
|
|
|
@param jobId the job to query.
|
|
@return the list of all of the setup tips.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReduceTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Applications should rather use {@link #getReduceTaskReports(JobID)}">
|
|
<param name="jobId" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Applications should rather use {@link #getReduceTaskReports(JobID)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClusterStatus" return="org.apache.hadoop.mapred.ClusterStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get status information about the Map-Reduce cluster.
|
|
|
|
@return the status information about the Map-Reduce cluster as an object
|
|
of {@link ClusterStatus}.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="jobsToComplete" return="org.apache.hadoop.mapred.JobStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the jobs that are not completed and not failed.
|
|
|
|
@return array of {@link JobStatus} for the running/to-be-run jobs.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAllJobs" return="org.apache.hadoop.mapred.JobStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the jobs that are submitted.
|
|
|
|
@return array of {@link JobStatus} for the submitted jobs.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="runJob" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Utility that submits a job, then polls for progress until the job is
|
|
complete.
|
|
|
|
@param job the job configuration.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskOutputFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newValue" type="org.apache.hadoop.mapred.JobClient.TaskStatusFilter"/>
|
|
<doc>
|
|
 <![CDATA[Sets the output filter for tasks. Only those tasks are printed whose
|
|
output matches the filter.
|
|
@param newValue task filter.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskOutputFilter" return="org.apache.hadoop.mapred.JobClient.TaskStatusFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the task output filter out of the JobConf.
|
|
|
|
@param job the JobConf to examine.
|
|
@return the filter level.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskOutputFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="newValue" type="org.apache.hadoop.mapred.JobClient.TaskStatusFilter"/>
|
|
<doc>
|
|
<![CDATA[Modify the JobConf to set the task output filter.
|
|
|
|
@param job the JobConf to modify.
|
|
@param newValue the value to set.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskOutputFilter" return="org.apache.hadoop.mapred.JobClient.TaskStatusFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns task output filter.
|
|
@return task filter.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<method name="getDefaultMaps" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get status information about the max available Maps in the cluster.
|
|
|
|
@return the max available Maps in the cluster
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultReduces" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get status information about the max available Reduces in the cluster.
|
|
|
|
@return the max available Reduces in the cluster
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSystemDir" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Grab the jobtracker system directory path where job-specific files are to be placed.
|
|
|
|
@return the system directory where job-specific files are to be placed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getQueues" return="org.apache.hadoop.mapred.JobQueueInfo[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return an array of queue information objects about all the Job Queues
|
|
configured.
|
|
|
|
@return Array of JobQueueInfo objects
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobsFromQueue" return="org.apache.hadoop.mapred.JobStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="queueName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
 <![CDATA[Gets all the jobs which were added to a particular Job Queue
|
|
|
|
@param queueName name of the Job Queue
|
|
@return Array of jobs present in the job queue
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getQueueInfo" return="org.apache.hadoop.mapred.JobQueueInfo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="queueName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
 <![CDATA[Gets the queue information associated with a particular Job Queue
|
|
|
|
@param queueName name of the job queue.
|
|
 @return Queue information associated with the particular queue.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>JobClient</code> is the primary interface for the user-job to interact
|
|
with the {@link JobTracker}.
|
|
|
|
<code>JobClient</code> provides facilities to submit jobs, track their
|
|
progress, access component-tasks' reports/logs, get the Map-Reduce cluster
|
|
status information etc.
|
|
|
|
<p>The job submission process involves:
|
|
<ol>
|
|
<li>
|
|
Checking the input and output specifications of the job.
|
|
</li>
|
|
<li>
|
|
Computing the {@link InputSplit}s for the job.
|
|
</li>
|
|
<li>
|
|
Setup the requisite accounting information for the {@link DistributedCache}
|
|
of the job, if necessary.
|
|
</li>
|
|
<li>
|
|
Copying the job's jar and configuration to the map-reduce system directory
|
|
on the distributed file-system.
|
|
</li>
|
|
<li>
|
|
Submitting the job to the <code>JobTracker</code> and optionally monitoring
|
|
 its status.
|
|
</li>
|
|
</ol></p>
|
|
|
|
Normally the user creates the application, describes various facets of the
|
|
job via {@link JobConf} and then uses the <code>JobClient</code> to submit
|
|
the job and monitor its progress.
|
|
|
|
<p>Here is an example on how to use <code>JobClient</code>:</p>
|
|
<p><blockquote><pre>
|
|
// Create a new JobConf
|
|
JobConf job = new JobConf(new Configuration(), MyJob.class);
|
|
|
|
// Specify various job-specific parameters
|
|
job.setJobName("myjob");
|
|
|
|
job.setInputPath(new Path("in"));
|
|
job.setOutputPath(new Path("out"));
|
|
|
|
job.setMapperClass(MyJob.MyMapper.class);
|
|
job.setReducerClass(MyJob.MyReducer.class);
|
|
|
|
// Submit the job, then poll for progress until the job is complete
|
|
JobClient.runJob(job);
|
|
</pre></blockquote></p>
|
|
|
|
<h4 id="JobControl">Job Control</h4>
|
|
|
|
<p>At times clients would chain map-reduce jobs to accomplish complex tasks
|
|
which cannot be done via a single map-reduce job. This is fairly easy since
|
|
 the output of the job typically goes to the distributed file-system, and that
|
|
can be used as the input for the next job.</p>
|
|
|
|
<p>However, this also means that the onus on ensuring jobs are complete
|
|
(success/failure) lies squarely on the clients. In such situations the
|
|
various job-control options are:
|
|
<ol>
|
|
<li>
|
|
{@link #runJob(JobConf)} : submits the job and returns only after
|
|
the job has completed.
|
|
</li>
|
|
<li>
|
|
 {@link #submitJob(JobConf)} : only submits the job; the client then polls the
|
|
returned handle to the {@link RunningJob} to query status and make
|
|
scheduling decisions (a sketch follows this list).
|
|
</li>
|
|
<li>
|
|
{@link JobConf#setJobEndNotificationURI(String)} : setup a notification
|
|
on job-completion, thus avoiding polling.
|
|
</li>
|
|
</ol></p>
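<p>As a minimal, illustrative sketch of the second option above (the class and
job names are hypothetical, and exception handling is omitted):</p>
<p><blockquote><pre>
    JobConf job = new JobConf(new Configuration(), MyJob.class);
    job.setJobName("myjob");

    JobClient client = new JobClient(job);
    // Submit without blocking, then poll the returned handle.
    RunningJob running = client.submitJob(job);
    while (!running.isComplete()) {
      Thread.sleep(5000);                 // poll every 5 seconds
    }
    System.out.println("Job succeeded: " + running.isSuccessful());
</pre></blockquote></p>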
|
|
|
|
@see JobConf
|
|
@see ClusterStatus
|
|
@see Tool
|
|
@see DistributedCache]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobClient -->
|
|
<!-- start class org.apache.hadoop.mapred.JobClient.TaskStatusFilter -->
|
|
<class name="JobClient.TaskStatusFilter" extends="java.lang.Enum<org.apache.hadoop.mapred.JobClient.TaskStatusFilter>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.JobClient.TaskStatusFilter[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.JobClient.TaskStatusFilter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobClient.TaskStatusFilter -->
|
|
<!-- start class org.apache.hadoop.mapred.JobConf -->
|
|
<class name="JobConf" extends="org.apache.hadoop.conf.Configuration"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a map/reduce job configuration.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobConf" type="java.lang.Class"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a map/reduce job configuration.
|
|
|
|
@param exampleClass a class whose containing jar is used as the job's jar.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobConf" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a map/reduce job configuration.
|
|
|
|
@param conf a Configuration whose settings will be inherited.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobConf" type="org.apache.hadoop.conf.Configuration, java.lang.Class"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a map/reduce job configuration.
|
|
|
|
@param conf a Configuration whose settings will be inherited.
|
|
@param exampleClass a class whose containing jar is used as the job's jar.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobConf" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a map/reduce configuration.
|
|
|
|
@param config a Configuration-format XML job description file.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobConf" type="org.apache.hadoop.fs.Path"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a map/reduce configuration.
|
|
|
|
@param config a Configuration-format XML job description file.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobConf" type="boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A new map/reduce configuration where the behavior of reading from the
|
|
default resources can be turned off.
|
|
<p/>
|
|
If the parameter {@code loadDefaults} is false, the new instance
|
|
will not load resources from the default files.
|
|
|
|
@param loadDefaults specifies whether to load from the default files]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getJar" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the user jar for the map-reduce job.
|
|
|
|
@return the user jar for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJar"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jar" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the user jar for the map-reduce job.
|
|
|
|
@param jar the user jar for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJarByClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cls" type="java.lang.Class"/>
|
|
<doc>
|
|
<![CDATA[Set the job's jar file by finding an example class location.
|
|
|
|
@param cls the example class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalDirs" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="deleteLocalFiles"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="deleteLocalFiles"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="subdir" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getLocalPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pathString" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Constructs a local file name. Files are distributed among configured
|
|
local directories.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUser" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the reported username for this job.
|
|
|
|
@return the username]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setUser"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="user" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the reported username for this job.
|
|
|
|
@param user the username for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setKeepFailedTaskFiles"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="keep" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether the framework should keep the intermediate files for
|
|
failed tasks.
|
|
|
|
@param keep <code>true</code> if framework should keep the intermediate files
|
|
for failed tasks, <code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeepFailedTaskFiles" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Should the temporary files for failed tasks be kept?
|
|
|
|
@return should the files be kept?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setKeepTaskFilesPattern"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pattern" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set a regular expression for task names that should be kept.
|
|
The regular expression ".*_m_000123_0" would keep the files
|
|
for the first instance of map 123 that ran (see the sketch below).
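<p>For instance, a short sketch using the pattern quoted above:</p>
<p><blockquote><pre>
    conf.setKeepTaskFilesPattern(".*_m_000123_0");
</pre></blockquote></p>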
|
|
|
|
@param pattern the java.util.regex.Pattern to match against the
|
|
task names.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeepTaskFilesPattern" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the regular expression that is matched against the task names
|
|
to see if we need to keep the files.
|
|
|
|
@return the pattern as a string, if it was set, otherwise null.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the current working directory for the default file system.
|
|
|
|
@param dir the new current working directory.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getWorkingDirectory" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the current working directory for the default file system.
|
|
|
|
@return the directory name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setNumTasksToExecutePerJvm"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="numTasks" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the number of tasks that a spawned task JVM should run
|
|
before it exits
|
|
@param numTasks the number of tasks to execute; defaults to 1;
|
|
-1 signifies no limit]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumTasksToExecutePerJvm" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the number of tasks that a spawned JVM should execute]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInputFormat" return="org.apache.hadoop.mapred.InputFormat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link InputFormat} implementation for the map-reduce job,
|
|
defaults to {@link TextInputFormat} if not specified explicitly.
|
|
|
|
@return the {@link InputFormat} implementation for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInputFormat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.InputFormat>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link InputFormat} implementation for the map-reduce job.
|
|
|
|
@param theClass the {@link InputFormat} implementation for the map-reduce
|
|
job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputFormat" return="org.apache.hadoop.mapred.OutputFormat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link OutputFormat} implementation for the map-reduce job,
|
|
defaults to {@link TextOutputFormat} if not specified explicitly.
|
|
|
|
@return the {@link OutputFormat} implementation for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputCommitter" return="org.apache.hadoop.mapred.OutputCommitter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link OutputCommitter} implementation for the map-reduce job,
|
|
defaults to {@link FileOutputCommitter} if not specified explicitly.
|
|
|
|
@return the {@link OutputCommitter} implementation for the map-reduce job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputCommitter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.OutputCommitter>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link OutputCommitter} implementation for the map-reduce job.
|
|
|
|
@param theClass the {@link OutputCommitter} implementation for the map-reduce
|
|
job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputFormat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.OutputFormat>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link OutputFormat} implementation for the map-reduce job.
|
|
|
|
@param theClass the {@link OutputFormat} implementation for the map-reduce
|
|
job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCompressMapOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="compress" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Should the map outputs be compressed before transfer?
|
|
Uses the SequenceFile compression.
|
|
|
|
@param compress should the map outputs be compressed?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCompressMapOutput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Are the outputs of the maps to be compressed?
|
|
|
|
@return <code>true</code> if the outputs of the maps are to be compressed,
|
|
<code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapOutputCompressorClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="codecClass" type="java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec>"/>
|
|
<doc>
|
|
<![CDATA[Set the given class as the {@link CompressionCodec} for the map outputs.
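<p>A brief sketch of enabling map-output compression:</p>
<p><blockquote><pre>
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(org.apache.hadoop.io.compress.GzipCodec.class);
</pre></blockquote></p>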
|
|
|
|
@param codecClass the {@link CompressionCodec} class that will compress
|
|
the map outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapOutputCompressorClass" return="java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="defaultValue" type="java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec>"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link CompressionCodec} for compressing the map outputs.
|
|
|
|
@param defaultValue the {@link CompressionCodec} to return if not set
|
|
@return the {@link CompressionCodec} class that should be used to compress the
|
|
map outputs.
|
|
@throws IllegalArgumentException if the class was specified, but not found]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapOutputKeyClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the key class for the map output data. If it is not set, use the
|
|
(final) output key class. This allows the map output key class to be
|
|
different than the final output key class.
|
|
|
|
@return the map output key class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapOutputKeyClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Set the key class for the map output data. This allows the user to
|
|
specify the map output key class to be different than the final output
|
|
value class.
|
|
|
|
@param theClass the map output key class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapOutputValueClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the value class for the map output data. If it is not set, use the
|
|
(final) output value class. This allows the map output value class to be
|
|
different than the final output value class.
|
|
|
|
@return the map output value class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapOutputValueClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Set the value class for the map output data. This allows the user to
|
|
specify the map output value class to be different than the final output
|
|
value class.
|
|
|
|
@param theClass the map output value class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputKeyClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the key class for the job output data.
|
|
|
|
@return the key class for the job output data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputKeyClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Set the key class for the job output data.
|
|
|
|
@param theClass the key class for the job output data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputKeyComparator" return="org.apache.hadoop.io.RawComparator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link RawComparator} comparator used to compare keys.
|
|
|
|
@return the {@link RawComparator} comparator used to compare keys.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputKeyComparatorClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.io.RawComparator>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link RawComparator} comparator used to compare keys.
|
|
|
|
@param theClass the {@link RawComparator} comparator used to
|
|
compare keys.
|
|
@see #setOutputValueGroupingComparator(Class)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setKeyFieldComparatorOptions"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="keySpec" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link KeyFieldBasedComparator} options used to compare keys.
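<p>A brief illustrative sketch (the field numbers are arbitrary):</p>
<p><blockquote><pre>
    // Sort on the second key field only, numerically and in descending order.
    conf.setKeyFieldComparatorOptions("-k2,2nr");
</pre></blockquote></p>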
|
|
|
|
@param keySpec the key specification of the form -k pos1[,pos2], where,
|
|
pos is of the form f[.c][opts], where f is the number
|
|
of the key field to use, and c is the number of the first character from
|
|
the beginning of the field. Fields and character positions are numbered
|
|
starting with 1; a character position of zero in pos2 indicates the
|
|
field's last character. If '.c' is omitted from pos1, it defaults to 1
|
|
(the beginning of the field); if omitted from pos2, it defaults to 0
|
|
(the end of the field). opts are ordering options. The supported options
|
|
are:
|
|
-n, (Sort numerically)
|
|
-r, (Reverse the result of comparison)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeyFieldComparatorOption" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link KeyFieldBasedComparator} options]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setKeyFieldPartitionerOptions"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="keySpec" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link KeyFieldBasedPartitioner} options used for
|
|
{@link Partitioner}
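<p>A brief illustrative sketch (the field numbers are arbitrary):</p>
<p><blockquote><pre>
    // Partition on the first two fields of the key.
    conf.setKeyFieldPartitionerOptions("-k1,2");
</pre></blockquote></p>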
|
|
|
|
@param keySpec the key specification of the form -k pos1[,pos2], where,
|
|
pos is of the form f[.c][opts], where f is the number
|
|
of the key field to use, and c is the number of the first character from
|
|
the beginning of the field. Fields and character positions are numbered
|
|
starting with 1; a character position of zero in pos2 indicates the
|
|
field's last character. If '.c' is omitted from pos1, it defaults to 1
|
|
(the beginning of the field); if omitted from pos2, it defaults to 0
|
|
(the end of the field).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeyFieldPartitionerOption" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link KeyFieldBasedPartitioner} options]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputValueGroupingComparator" return="org.apache.hadoop.io.RawComparator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the user defined {@link WritableComparable} comparator for
|
|
grouping keys of inputs to the reduce.
|
|
|
|
@return comparator set by the user for grouping values.
|
|
@see #setOutputValueGroupingComparator(Class) for details.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputValueGroupingComparator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.io.RawComparator>"/>
|
|
<doc>
|
|
<![CDATA[Set the user defined {@link RawComparator} comparator for
|
|
grouping keys in the input to the reduce.
|
|
|
|
<p>This comparator should be provided if the equivalence rules for keys
|
|
for sorting the intermediates are different from those for grouping keys
|
|
before each call to
|
|
{@link Reducer#reduce(Object, java.util.Iterator, OutputCollector, Reporter)}.</p>
|
|
|
|
<p>For key-value pairs (K1,V1) and (K2,V2), the values (V1, V2) are passed
|
|
in a single call to the reduce function if K1 and K2 compare as equal.</p>
|
|
|
|
<p>Since {@link #setOutputKeyComparatorClass(Class)} can be used to control
|
|
how keys are sorted, this can be used in conjunction to simulate
|
|
<i>secondary sort on values</i>.</p>
|
|
|
|
<p><i>Note</i>: This is not a guarantee of the reduce sort being
|
|
<i>stable</i> in any sense. (In any case, with the order of available
|
|
map-outputs to the reduce being non-deterministic, it wouldn't make
|
|
that much sense.)</p>
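<p>A minimal sketch of such a secondary-sort setup; the comparator classes are
hypothetical user-supplied {@link RawComparator} implementations:</p>
<p><blockquote><pre>
    // Sort on the full (composite) key ...
    job.setOutputKeyComparatorClass(FullKeyComparator.class);
    // ... but group reduce inputs by the primary part of the key only.
    job.setOutputValueGroupingComparator(PrimaryKeyGroupingComparator.class);
</pre></blockquote></p>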
|
|
|
|
@param theClass the comparator class to be used for grouping keys.
|
|
It should implement <code>RawComparator</code>.
|
|
@see #setOutputKeyComparatorClass(Class)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputValueClass" return="java.lang.Class<?>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the value class for job outputs.
|
|
|
|
@return the value class for job outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputValueClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Set the value class for job outputs.
|
|
|
|
@param theClass the value class for job outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapperClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.Mapper>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link Mapper} class for the job.
|
|
|
|
@return the {@link Mapper} class for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapperClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Mapper>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link Mapper} class for the job.
|
|
|
|
@param theClass the {@link Mapper} class for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapRunnerClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.MapRunnable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link MapRunnable} class for the job.
|
|
|
|
@return the {@link MapRunnable} class for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapRunnerClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.MapRunnable>"/>
|
|
<doc>
|
|
<![CDATA[Expert: Set the {@link MapRunnable} class for the job.
|
|
|
|
Typically used to exert greater control on {@link Mapper}s.
|
|
|
|
@param theClass the {@link MapRunnable} class for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPartitionerClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.Partitioner>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link Partitioner} used to partition {@link Mapper}-outputs
|
|
to be sent to the {@link Reducer}s.
|
|
|
|
@return the {@link Partitioner} used to partition map-outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setPartitionerClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Partitioner>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link Partitioner} class used to partition
|
|
{@link Mapper}-outputs to be sent to the {@link Reducer}s.
|
|
|
|
@param theClass the {@link Partitioner} used to partition map-outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReducerClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.Reducer>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link Reducer} class for the job.
|
|
|
|
@return the {@link Reducer} class for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setReducerClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Reducer>"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link Reducer} class for the job.
|
|
|
|
@param theClass the {@link Reducer} class for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.Reducer>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the user-defined <i>combiner</i> class used to combine map-outputs
|
|
before being sent to the reducers. Typically the combiner is the same as
|
|
the {@link Reducer} for the job i.e. {@link #getReducerClass()}.
|
|
|
|
@return the user-defined combiner class used to combine map-outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCombinerClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Reducer>"/>
|
|
<doc>
|
|
<![CDATA[Set the user-defined <i>combiner</i> class used to combine map-outputs
|
|
before being sent to the reducers.
|
|
|
|
<p>The combiner is a task-level aggregation operation which, in some cases,
|
|
helps to cut down the amount of data transferred from the {@link Mapper} to
|
|
the {@link Reducer}, leading to better performance.</p>
|
|
|
|
<p>Typically the combiner is the same as the <code>Reducer</code> for the
|
|
job i.e. {@link #setReducerClass(Class)}.</p>
|
|
|
|
@param theClass the user-defined combiner class used to combine
|
|
map-outputs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSpeculativeExecution" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Should speculative execution be used for this job?
|
|
Defaults to <code>true</code>.
|
|
|
|
@return <code>true</code> if speculative execution be used for this job,
|
|
<code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSpeculativeExecution"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="speculativeExecution" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Turn speculative execution on or off for this job.
|
|
|
|
@param speculativeExecution <code>true</code> if speculative execution
|
|
should be turned on, else <code>false</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapSpeculativeExecution" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Should speculative execution be used for this job for map tasks?
|
|
Defaults to <code>true</code>.
|
|
|
|
@return <code>true</code> if speculative execution be
|
|
used for this job for map tasks,
|
|
<code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapSpeculativeExecution"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="speculativeExecution" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Turn speculative execution on or off for this job for map tasks.
|
|
|
|
@param speculativeExecution <code>true</code> if speculative execution
|
|
should be turned on for map tasks,
|
|
else <code>false</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReduceSpeculativeExecution" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Should speculative execution be used for this job for reduce tasks?
|
|
Defaults to <code>true</code>.
|
|
|
|
@return <code>true</code> if speculative execution be used
|
|
for reduce tasks for this job,
|
|
<code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setReduceSpeculativeExecution"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="speculativeExecution" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Turn speculative execution on or off for this job for reduce tasks.
|
|
|
|
@param speculativeExecution <code>true</code> if speculative execution
|
|
should be turned on for reduce tasks,
|
|
else <code>false</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumMapTasks" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the configured number of map tasks for this job.
|
|
Defaults to <code>1</code>.
|
|
|
|
@return the number of map tasks for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setNumMapTasks"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the number of map tasks for this job.
|
|
|
|
<p><i>Note</i>: This is only a <i>hint</i> to the framework. The actual
|
|
number of spawned map tasks depends on the number of {@link InputSplit}s
|
|
generated by the job's {@link InputFormat#getSplits(JobConf, int)}.
|
|
|
|
A custom {@link InputFormat} is typically used to accurately control
|
|
the number of map tasks for the job.</p>
|
|
|
|
<h4 id="NoOfMaps">How many maps?</h4>
|
|
|
|
<p>The number of maps is usually driven by the total size of the inputs
|
|
i.e. total number of blocks of the input files.</p>
|
|
|
|
<p>The right level of parallelism for maps seems to be around 10-100 maps
|
|
per-node, although it has been set up to 300 or so for very cpu-light map
|
|
tasks. Task setup takes a while, so it is best if the maps take at least a
|
|
minute to execute.</p>
|
|
|
|
<p>The default behavior of file-based {@link InputFormat}s is to split the
|
|
input into <i>logical</i> {@link InputSplit}s based on the total size, in
|
|
bytes, of input files. However, the {@link FileSystem} blocksize of the
|
|
input files is treated as an upper bound for input splits. A lower bound
|
|
on the split size can be set via
|
|
<a href="{@docRoot}/../hadoop-default.html#mapred.min.split.size">
|
|
mapred.min.split.size</a>.</p>
|
|
|
|
<p>Thus, if you expect 10TB of input data and have a blocksize of 128MB,
|
|
you'll end up with 82,000 maps, unless {@link #setNumMapTasks(int)} is
|
|
used to set it even higher.</p>
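<p>A rough, illustrative sketch of influencing the number of maps (the values
are assumptions, not recommendations):</p>
<p><blockquote><pre>
    // Raise the lower bound on split size so large inputs produce fewer,
    // larger maps; the file-system block size remains the per-split upper bound.
    job.setLong("mapred.min.split.size", 256 * 1024 * 1024L);
    // A hint only; the actual count follows InputFormat#getSplits(JobConf, int).
    job.setNumMapTasks(40000);
</pre></blockquote></p>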
|
|
|
|
@param n the number of map tasks for this job.
|
|
@see InputFormat#getSplits(JobConf, int)
|
|
@see FileInputFormat
|
|
@see FileSystem#getDefaultBlockSize()
|
|
@see FileStatus#getBlockSize()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumReduceTasks" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the configured number of reduce tasks for this job. Defaults to
|
|
<code>1</code>.
|
|
|
|
@return the number of reduce tasks for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setNumReduceTasks"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the requisite number of reduce tasks for this job.
|
|
|
|
<h4 id="NoOfReduces">How many reduces?</h4>
|
|
|
|
<p>The right number of reduces seems to be <code>0.95</code> or
|
|
<code>1.75</code> multiplied by (<<i>no. of nodes</i>> *
|
|
<a href="{@docRoot}/../hadoop-default.html#mapred.tasktracker.reduce.tasks.maximum">
|
|
mapred.tasktracker.reduce.tasks.maximum</a>).
|
|
</p>
|
|
|
|
<p>With <code>0.95</code> all of the reduces can launch immediately and
|
|
start transferring map outputs as the maps finish. With <code>1.75</code>
|
|
the faster nodes will finish their first round of reduces and launch a
|
|
second wave of reduces doing a much better job of load balancing.</p>
|
|
|
|
<p>Increasing the number of reduces increases the framework overhead, but
|
|
improves load balancing and lowers the cost of failures.</p>
|
|
|
|
<p>The scaling factors above are slightly less than whole numbers to
|
|
reserve a few reduce slots in the framework for speculative-tasks, failures
|
|
etc.</p>
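<p>An illustrative sketch of the <code>0.95</code> rule; the cluster size and
slots-per-node figures are assumptions:</p>
<p><blockquote><pre>
    int nodes = 20;              // assumed cluster size
    int reduceSlotsPerNode = 2;  // assumed mapred.tasktracker.reduce.tasks.maximum
    job.setNumReduceTasks((int) (0.95 * nodes * reduceSlotsPerNode));  // 38 reduces
</pre></blockquote></p>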
|
|
|
|
<h4 id="ReducerNone">Reducer NONE</h4>
|
|
|
|
<p>It is legal to set the number of reduce-tasks to <code>zero</code>.</p>
|
|
|
|
<p>In this case the output of the map-tasks goes directly to the distributed
|
|
file-system, to the path set by
|
|
{@link FileOutputFormat#setOutputPath(JobConf, Path)}. Also, the
|
|
framework doesn't sort the map-outputs before writing them out to HDFS.</p>
|
|
|
|
@param n the number of reduce tasks for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxMapAttempts" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the configured number of maximum attempts that will be made to run a
|
|
map task, as specified by the <code>mapred.map.max.attempts</code>
|
|
property. If this property is not already set, the default is 4 attempts.
|
|
|
|
@return the max number of attempts per map task.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMaxMapAttempts"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="int"/>
|
|
<doc>
|
|
<![CDATA[Expert: Set the number of maximum attempts that will be made to run a
|
|
map task.
|
|
|
|
@param n the number of attempts per map task.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxReduceAttempts" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the configured number of maximum attempts that will be made to run a
|
|
reduce task, as specified by the <code>mapred.reduce.max.attempts</code>
|
|
property. If this property is not already set, the default is 4 attempts.
|
|
|
|
@return the max number of attempts per reduce task.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMaxReduceAttempts"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="int"/>
|
|
<doc>
|
|
<![CDATA[Expert: Set the number of maximum attempts that will be made to run a
|
|
reduce task.
|
|
|
|
@param n the number of attempts per reduce task.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the user-specified job name. This is only used to identify the
|
|
job to the user.
|
|
|
|
@return the job's name, defaulting to "".]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the user-specified job name.
|
|
|
|
@param name the job's new name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSessionId" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the user-specified session identifier. The default is the empty string.
|
|
|
|
The session identifier is used to tag metric data that is reported to some
|
|
performance metrics system via the org.apache.hadoop.metrics API. The
|
|
session identifier is intended, in particular, for use by Hadoop-On-Demand
|
|
(HOD) which allocates a virtual Hadoop cluster dynamically and transiently.
|
|
HOD will set the session identifier by modifying the hadoop-site.xml file
|
|
before starting the cluster.
|
|
|
|
When not running under HOD, this identifier is expected to remain set to
|
|
the empty string.
|
|
|
|
@return the session identifier, defaulting to "".]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSessionId"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="sessionId" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the user-specified session identifier.
|
|
|
|
@param sessionId the new session id.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMaxTaskFailuresPerTracker"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="noFailures" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the maximum no. of failures of a given job per tasktracker.
|
|
If the no. of task failures exceeds <code>noFailures</code>, the
|
|
tasktracker is <i>blacklisted</i> for this job.
|
|
|
|
@param noFailures maximum no. of failures of a given job per tasktracker.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxTaskFailuresPerTracker" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Expert: Get the maximum no. of failures of a given job per tasktracker.
|
|
If the no. of task failures exceeds this, the tasktracker is
|
|
<i>blacklisted</i> for this job.
|
|
|
|
@return the maximum no. of failures of a given job per tasktracker.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxMapTaskFailuresPercent" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the maximum percentage of map tasks that can fail without
|
|
the job being aborted.
|
|
|
|
Each map task is executed a minimum of {@link #getMaxMapAttempts()}
|
|
attempts before being declared as <i>failed</i>.
|
|
|
|
Defaults to <code>zero</code>, i.e. <i>any</i> failed map-task results in
|
|
the job being declared as {@link JobStatus#FAILED}.
|
|
|
|
@return the maximum percentage of map tasks that can fail without
|
|
the job being aborted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMaxMapTaskFailuresPercent"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="percent" type="int"/>
|
|
<doc>
|
|
<![CDATA[Expert: Set the maximum percentage of map tasks that can fail without the
|
|
job being aborted.
|
|
|
|
Each map task is executed a minimum of {@link #getMaxMapAttempts} attempts
|
|
before being declared as <i>failed</i>.
|
|
|
|
@param percent the maximum percentage of map tasks that can fail without
|
|
the job being aborted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxReduceTaskFailuresPercent" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the maximum percentage of reduce tasks that can fail without
|
|
the job being aborted.
|
|
|
|
Each reduce task is executed a minimum of {@link #getMaxReduceAttempts()}
|
|
attempts before being declared as <i>failed</i>.
|
|
|
|
Defaults to <code>zero</code>, i.e. <i>any</i> failed reduce-task results
|
|
in the job being declared as {@link JobStatus#FAILED}.
|
|
|
|
@return the maximum percentage of reduce tasks that can fail without
|
|
the job being aborted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMaxReduceTaskFailuresPercent"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="percent" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the maximum percentage of reduce tasks that can fail without the job
|
|
being aborted.
|
|
|
|
Each reduce task is executed a minimum of {@link #getMaxReduceAttempts()}
|
|
attempts before being declared as <i>failed</i>.
|
|
|
|
@param percent the maximum percentage of reduce tasks that can fail without
|
|
the job being aborted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobPriority"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="prio" type="org.apache.hadoop.mapred.JobPriority"/>
|
|
<doc>
|
|
<![CDATA[Set {@link JobPriority} for this job.
|
|
|
|
@param prio the {@link JobPriority} for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobPriority" return="org.apache.hadoop.mapred.JobPriority"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the {@link JobPriority} for this job.
|
|
|
|
@return the {@link JobPriority} for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProfileEnabled" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get whether the task profiling is enabled.
|
|
@return true if some tasks will be profiled]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setProfileEnabled"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newValue" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether the system should collect profiler information for some of
|
|
the tasks in this job. The information is stored in the user log
|
|
directory.
|
|
@param newValue true means it should be gathered]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProfileParams" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the profiler configuration arguments.
|
|
|
|
The default value for this property is
|
|
"-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s"
|
|
|
|
@return the parameters to pass to the task child to configure profiling]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setProfileParams"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the profiler configuration arguments. If the string contains a '%s' it
|
|
will be replaced with the name of the profiling output file when the task
|
|
runs.
|
|
|
|
This value is passed to the task child JVM on the command line.
|
|
|
|
@param value the configuration string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProfileTaskRange" return="org.apache.hadoop.conf.Configuration.IntegerRanges"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="isMap" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Get the range of maps or reduces to profile.
|
|
@param isMap is the task a map?
|
|
@return the task ranges]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setProfileTaskRange"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="isMap" type="boolean"/>
|
|
<param name="newValue" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the ranges of maps or reduces to profile. setProfileEnabled(true)
|
|
must also be called.
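<p>A short sketch of enabling profiling for a few tasks; the range is an
arbitrary illustration:</p>
<p><blockquote><pre>
    conf.setProfileEnabled(true);
    conf.setProfileTaskRange(true, "0-2");   // profile map task ids 0 through 2
</pre></blockquote></p>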
|
|
@param newValue a set of integer ranges of the map ids]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapDebugScript"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="mDbgScript" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the debug script to run when the map tasks fail.
|
|
|
|
<p>The debug script can aid debugging of failed map tasks. The script is
|
|
given the task's stdout, stderr, syslog, and jobconf files as arguments.</p>
|
|
|
|
<p>The debug command, run on the node where the map failed, is:</p>
|
|
<p><pre><blockquote>
|
|
$script $stdout $stderr $syslog $jobconf.
|
|
</blockquote></pre></p>
|
|
|
|
<p> The script file is distributed through {@link DistributedCache}
|
|
APIs. The script needs to be symlinked. </p>
|
|
|
|
<p>Here is an example of how to submit a script:
|
|
<p><blockquote><pre>
|
|
job.setMapDebugScript("./myscript");
|
|
DistributedCache.createSymlink(job);
|
|
DistributedCache.addCacheFile("/debug/scripts/myscript#myscript");
|
|
</pre></blockquote></p>
|
|
|
|
@param mDbgScript the script name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapDebugScript" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the map task's debug script.
|
|
|
|
@return the debug Script for the mapred job for failed map tasks.
|
|
@see #setMapDebugScript(String)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setReduceDebugScript"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rDbgScript" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the debug script to run when the reduce tasks fail.
|
|
|
|
<p>The debug script can aid debugging of failed reduce tasks. The script
|
|
is given the task's stdout, stderr, syslog, and jobconf files as arguments.</p>
|
|
|
|
<p>The debug command, run on the node where the reduce failed, is:</p>
|
|
<p><pre><blockquote>
|
|
$script $stdout $stderr $syslog $jobconf.
|
|
</blockquote></pre></p>
|
|
|
|
<p> The script file is distributed through {@link DistributedCache}
|
|
APIs. The script file needs to be symlinked.</p>
|
|
|
|
<p>Here is an example of how to submit a script:
|
|
<p><blockquote><pre>
|
|
job.setReduceDebugScript("./myscript");
|
|
DistributedCache.createSymlink(job);
|
|
DistributedCache.addCacheFile("/debug/scripts/myscript#myscript");
|
|
</pre></blockquote></p>
|
|
|
|
@param rDbgScript the script name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReduceDebugScript" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the reduce task's debug script.
|
|
|
|
@return the debug script for the mapred job for failed reduce tasks.
|
|
@see #setReduceDebugScript(String)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobEndNotificationURI" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the uri to be invoked in order to send a notification after the job
|
|
has completed (success/failure).
|
|
|
|
@return the job end notification uri, <code>null</code> if it hasn't
|
|
been set.
|
|
@see #setJobEndNotificationURI(String)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobEndNotificationURI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uri" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the uri to be invoked in order to send a notification after the job
|
|
has completed (success/failure).
|
|
|
|
<p>The uri can contain 2 special parameters: <tt>$jobId</tt> and
|
|
<tt>$jobStatus</tt>. Those, if present, are replaced by the job's
|
|
identifier and completion-status respectively.</p>
|
|
|
|
<p>This is typically used by application-writers to implement chaining of
|
|
Map-Reduce jobs in an <i>asynchronous manner</i>.</p>
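<p>A brief sketch of a notification uri; the host and path are hypothetical:</p>
<p><blockquote><pre>
    job.setJobEndNotificationURI(
        "http://workflow.example.com/notify?jobid=$jobId&status=$jobStatus");
</pre></blockquote></p>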
|
|
|
|
@param uri the job end notification uri
|
|
@see JobStatus
|
|
@see <a href="{@docRoot}/org/apache/hadoop/mapred/JobClient.html#JobCompletionAndChaining">Job Completion and Chaining</a>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobLocalDir" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the job-specific shared directory for use as scratch space.
|
|
|
|
<p>
|
|
When a job starts, a shared directory is created at location
|
|
<code>
|
|
${mapred.local.dir}/taskTracker/jobcache/$jobid/work/ </code>.
|
|
This directory is exposed to the users through
|
|
<code>job.local.dir </code>.
|
|
So, the tasks can use this space
|
|
as scratch space and share files among themselves.</p>
|
|
This value is also available as a system property.
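<p>A short sketch of resolving the directory from within a task; the file name
is hypothetical:</p>
<p><blockquote><pre>
    String jobLocalDir = conf.get("job.local.dir");
    File shared = new File(jobLocalDir, "lookup-table.dat");
</pre></blockquote></p>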
|
|
|
|
@return The localized job specific shared directory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getQueueName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the name of the queue to which this job is submitted.
|
|
Defaults to 'default'.
|
|
|
|
@return name of the queue]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setQueueName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="queueName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the name of the queue to which this job should be submitted.
|
|
|
|
@param queueName Name of the queue]]>
|
|
</doc>
|
|
</method>
|
|
<field name="DEFAULT_QUEUE_NAME" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Name of the queue to which jobs will be submitted, if no queue
|
|
name is specified.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A map/reduce job configuration.
|
|
|
|
<p><code>JobConf</code> is the primary interface for a user to describe a
|
|
map-reduce job to the Hadoop framework for execution. The framework tries to
|
|
faithfully execute the job as-is described by <code>JobConf</code>, however:
|
|
<ol>
|
|
<li>
|
|
Some configuration parameters might have been marked as
|
|
<a href="{@docRoot}/org/apache/hadoop/conf/Configuration.html#FinalParams">
|
|
final</a> by administrators and hence cannot be altered.
|
|
</li>
|
|
<li>
|
|
While some job parameters are straightforward to set
|
|
(e.g. {@link #setNumReduceTasks(int)}), some parameters interact subtly with the
|
|
rest of the framework and/or job-configuration and are relatively more
|
|
complex for the user to control finely (e.g. {@link #setNumMapTasks(int)}).
|
|
</li>
|
|
</ol></p>
|
|
|
|
<p><code>JobConf</code> typically specifies the {@link Mapper}, combiner
|
|
(if any), {@link Partitioner}, {@link Reducer}, {@link InputFormat} and
|
|
{@link OutputFormat} implementations to be used etc.
|
|
|
|
<p>Optionally <code>JobConf</code> is used to specify other advanced facets
|
|
of the job such as <code>Comparator</code>s to be used, files to be put in
|
|
the {@link DistributedCache}, whether or not intermediate and/or job outputs
|
|
are to be compressed (and how), debuggability via user-provided scripts
|
|
( {@link #setMapDebugScript(String)}/{@link #setReduceDebugScript(String)}),
|
|
for post-processing task logs, the task's stdout, stderr, and syslog,
|
|
etc.</p>
|
|
|
|
<p>Here is an example on how to configure a job via <code>JobConf</code>:</p>
|
|
<p><blockquote><pre>
|
|
// Create a new JobConf
|
|
JobConf job = new JobConf(new Configuration(), MyJob.class);
|
|
|
|
// Specify various job-specific parameters
|
|
job.setJobName("myjob");
|
|
|
|
FileInputFormat.setInputPaths(job, new Path("in"));
|
|
FileOutputFormat.setOutputPath(job, new Path("out"));
|
|
|
|
job.setMapperClass(MyJob.MyMapper.class);
|
|
job.setCombinerClass(MyJob.MyReducer.class);
|
|
job.setReducerClass(MyJob.MyReducer.class);
|
|
|
|
job.setInputFormat(SequenceFileInputFormat.class);
|
|
job.setOutputFormat(SequenceFileOutputFormat.class);
|
|
</pre></blockquote></p>
|
|
|
|
@see JobClient
|
|
@see ClusterStatus
|
|
@see Tool
|
|
@see DistributedCache]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobConf -->
|
|
<!-- start interface org.apache.hadoop.mapred.JobConfigurable -->
|
|
<interface name="JobConfigurable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Initializes a new instance from a {@link JobConf}.
|
|
|
|
@param job the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Something that may be configured.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.JobConfigurable -->
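<!-- Illustrative sketch of implementing JobConfigurable: the framework calls configure(JobConf)
     before the component is used, so per-job settings can be read there. The property name
     "myjob.case.sensitive" is a hypothetical example, not an existing Hadoop key.
     <pre>
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.JobConfigurable;

     public class CaseSensitivitySwitch implements JobConfigurable {
       private boolean caseSensitive;

       public void configure(JobConf job) {
         // Read a job-level flag from the configuration; defaults to true if unset.
         caseSensitive = job.getBoolean("myjob.case.sensitive", true);
       }

       public boolean isCaseSensitive() {
         return caseSensitive;
       }
     }
     </pre>
-->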
|
|
<!-- start class org.apache.hadoop.mapred.JobContext -->
|
|
<class name="JobContext" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getJobConf" return="org.apache.hadoop.mapred.JobConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the job Configuration
|
|
|
|
@return JobConf]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgressible" return="org.apache.hadoop.util.Progressable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the progress mechanism for reporting progress.
|
|
|
|
@return progress mechanism]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobContext -->
|
|
<!-- start class org.apache.hadoop.mapred.JobEndNotifier -->
|
|
<class name="JobEndNotifier" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobEndNotifier"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="startNotifier"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="stopNotifier"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="registerNotification"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="status" type="org.apache.hadoop.mapred.JobStatus"/>
|
|
</method>
|
|
<method name="localRunnerNotification"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="status" type="org.apache.hadoop.mapred.JobStatus"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobEndNotifier -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory -->
|
|
<class name="JobHistory" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobHistory"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="init" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="hostname" type="java.lang.String"/>
|
|
<param name="jobTrackerStartTime" type="long"/>
|
|
<doc>
|
|
<![CDATA[Initialize JobHistory files.
|
|
@param conf JobConf of the job tracker.
|
|
@param hostname jobtracker's hostname
|
|
@param jobTrackerStartTime jobtracker's start time
|
|
@return true if initialized properly
|
|
false otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<method name="parseHistoryFromFS"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="java.lang.String"/>
|
|
<param name="l" type="org.apache.hadoop.mapred.JobHistory.Listener"/>
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Parses a history file and invokes Listener.handle() for
each line of history. It can be used for looking through history
files for specific items without having to keep the whole history in memory.
|
|
@param path path to history file
|
|
@param l Listener for history events
|
|
@param fs FileSystem where history file is present
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isDisableHistory" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the history disable status. By default history is enabled, so this
|
|
method returns false.
|
|
@return true if history logging is disabled, false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDisableHistory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="disableHistory" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Enable/disable history logging. Default value is false, so history
|
|
is enabled by default.
|
|
@param disableHistory true if history should be disabled, false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskLogsUrl" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="attempt" type="org.apache.hadoop.mapred.JobHistory.TaskAttempt"/>
|
|
<doc>
|
|
<![CDATA[Return the TaskLogsUrl of a particular TaskAttempt
|
|
|
|
@param attempt
|
|
@return the taskLogsUrl. null if http-port or tracker-name or
|
|
task-attempt-id are unavailable.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="JOB_NAME_TRIM_LENGTH" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Provides methods for writing to and reading from job history.
|
|
Job History works in an append mode, JobHistory and its inner classes provide methods
|
|
to log job events.
|
|
|
|
JobHistory is split into multiple files; the format of each file is plain text where each line
is of the format [type (key=value)*], where type identifies the type of the record.
Type maps to the UID of one of the inner classes of this class.
|
|
|
|
Job history is maintained in a master index which contains start/stop times of all jobs with
a few other job-level properties. Apart from this, each job's history is maintained in a separate history
file. The name of a job history file follows the format jobtrackerId_jobid.
|
|
|
|
For parsing the job history it supports a listener-based interface where each line is parsed
and passed to the listener. The listener can create an object model of the history or look for specific
events and discard the rest of the history.
|
|
|
|
CHANGE LOG :
|
|
Version 0 : The history has the following format :
|
|
TAG KEY1="VALUE1" KEY2="VALUE2" and so on.
|
|
TAG can be Job, Task, MapAttempt or ReduceAttempt.
|
|
Note that a '"' is the line delimiter.
|
|
Version 1 : Changes the line delimiter to '.'
|
|
Values are now escaped for unambiguous parsing.
|
|
Added the Meta tag to store version info.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.HistoryCleaner -->
|
|
<class name="JobHistory.HistoryCleaner" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Runnable"/>
|
|
<constructor name="JobHistory.HistoryCleaner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Cleans up history data.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Delete history files older than one month. Update the master index and remove all
jobs older than one month. Also, if a job tracker has had no jobs in the last month,
remove the reference to that job tracker.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.HistoryCleaner -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.JobInfo -->
|
|
<class name="JobHistory.JobInfo" extends="org.apache.hadoop.mapred.JobHistory.KeyValuePair"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobHistory.JobInfo" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create new JobInfo]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getAllTasks" return="java.util.Map<java.lang.String, org.apache.hadoop.mapred.JobHistory.Task>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns all map and reduce tasks <taskid-Task>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocalJobFilePath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<doc>
|
|
<![CDATA[Get the path of the locally stored job file
|
|
@param jobId id of the job
|
|
@return the path of the job file on the local file system]]>
|
|
</doc>
|
|
</method>
|
|
<method name="encodeJobHistoryFilePath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logFile" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Helper function to encode the URL of the path of the job-history
|
|
log file.
|
|
|
|
@param logFile path of the job-history file
|
|
@return URL encoded path
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="encodeJobHistoryFileName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logFileName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Helper function to encode the URL of the filename of the job-history
|
|
log file.
|
|
|
|
@param logFileName file name of the job-history file
|
|
@return URL encoded filename
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="decodeJobHistoryFileName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logFileName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Helper function to decode the URL of the filename of the job-history
|
|
log file.
|
|
|
|
@param logFileName file name of the job-history file
|
|
@return URL decoded filename
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUserName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the user name from the job conf]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobHistoryLogLocation" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logFileName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the job history file path given the history filename]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobHistoryLogLocationForUser" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logFileName" type="java.lang.String"/>
|
|
<param name="jobConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the user job history file path]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobHistoryFileName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="id" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Recover the job history filename from the history folder.
|
|
Uses the following pattern
|
|
$jt-hostname_[0-9]*_$job-id_$user-$job-name*
|
|
@param jobConf the job conf
|
|
@param id job id]]>
|
|
</doc>
|
|
</method>
|
|
<method name="recoverJobHistoryFile" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="logFilePath" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Selects one of the two files generated as a part of recovery.
|
|
The rule of thumb is to always select the older file.
|
|
This call makes sure that only one file is left in the end.
|
|
@param conf job conf
|
|
@param logFilePath Path of the log file
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logSubmitted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="jobConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="jobConfPath" type="java.lang.String"/>
|
|
<param name="submitTime" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Log job submitted event to history. Creates a new file in history
|
|
for the job. If history file creation fails, it disables history
|
|
for all other events.
|
|
@param jobId job id assigned by job tracker.
|
|
@param jobConf job conf of the job
|
|
@param jobConfPath path to job conf xml file in HDFS.
|
|
@param submitTime time when job tracker received the job
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logInited"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="startTime" type="long"/>
|
|
<param name="totalMaps" type="int"/>
|
|
<param name="totalReduces" type="int"/>
|
|
<doc>
|
|
<![CDATA[Logs launch time of job.
|
|
|
|
@param jobId job id, assigned by jobtracker.
|
|
@param startTime start time of job.
|
|
@param totalMaps total maps assigned by jobtracker.
|
|
@param totalReduces total reduces.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logStarted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use {@link #logInited(JobID, long, int, int)} and
|
|
{@link #logStarted(JobID)}">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="startTime" type="long"/>
|
|
<param name="totalMaps" type="int"/>
|
|
<param name="totalReduces" type="int"/>
|
|
<doc>
|
|
<![CDATA[Logs the job as RUNNING.
|
|
|
|
@param jobId job id, assigned by jobtracker.
|
|
@param startTime start time of job.
|
|
@param totalMaps total maps assigned by jobtracker.
|
|
@param totalReduces total reduces.
|
|
@deprecated Use {@link #logInited(JobID, long, int, int)} and
|
|
{@link #logStarted(JobID)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logStarted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<doc>
|
|
<![CDATA[Logs job as running
|
|
@param jobId job id, assigned by jobtracker.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFinished"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="finishTime" type="long"/>
|
|
<param name="finishedMaps" type="int"/>
|
|
<param name="finishedReduces" type="int"/>
|
|
<param name="failedMaps" type="int"/>
|
|
<param name="failedReduces" type="int"/>
|
|
<param name="counters" type="org.apache.hadoop.mapred.Counters"/>
|
|
<doc>
|
|
<![CDATA[Log job finished event. Closes the job file in history.
|
|
@param jobId job id, assigned by jobtracker.
|
|
@param finishTime finish time of job in ms.
|
|
@param finishedMaps no of maps successfully finished.
|
|
@param finishedReduces no of reduces finished successfully.
|
|
@param failedMaps no of failed map tasks.
|
|
@param failedReduces no of failed reduce tasks.
|
|
@param counters the counters from the job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFailed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="finishedMaps" type="int"/>
|
|
<param name="finishedReduces" type="int"/>
|
|
<doc>
|
|
<![CDATA[Logs job failed event. Closes the job history log file.
|
|
@param jobid job id
|
|
@param timestamp time when job failure was detected in ms.
|
|
@param finishedMaps no of finished map tasks.
|
|
@param finishedReduces no of finished reduce tasks.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logKilled"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="finishedMaps" type="int"/>
|
|
<param name="finishedReduces" type="int"/>
|
|
<doc>
|
|
<![CDATA[Logs job killed event. Closes the job history log file.
|
|
|
|
@param jobid
|
|
job id
|
|
@param timestamp
|
|
time when the job kill was issued, in ms.
|
|
@param finishedMaps
|
|
no of finished map tasks.
|
|
@param finishedReduces
|
|
no of finished reduce tasks.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logJobPriority"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="priority" type="org.apache.hadoop.mapred.JobPriority"/>
|
|
<doc>
|
|
<![CDATA[Log job's priority.
|
|
@param jobid job id
|
|
@param priority Jobs priority]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logJobInfo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="submitTime" type="long"/>
|
|
<param name="launchTime" type="long"/>
|
|
<param name="restartCount" type="int"/>
|
|
<doc>
|
|
<![CDATA[Log job's submit-time/launch-time
|
|
@param jobid job id
|
|
@param submitTime job's submit time
|
|
@param launchTime job's launch time
|
|
@param restartCount number of times the job got restarted]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Helper class for logging or reading back events related to job start, finish or failure.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.JobInfo -->
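<!-- Illustrative sketch of the JobInfo file-name helpers above: encoding a job-history file
     name for use in a URL and decoding it back. The file name is a made-up placeholder that
     merely follows the jobtrackerId_jobid naming pattern described earlier.
     <pre>
     import java.io.IOException;
     import org.apache.hadoop.mapred.JobHistory;

     public class HistoryFileNameCodec {
       public static void main(String[] args) throws IOException {
         String name = "tracker.example.com_1234_job_200707121733_0003_user_my job";
         String encoded = JobHistory.JobInfo.encodeJobHistoryFileName(name);
         String decoded = JobHistory.JobInfo.decodeJobHistoryFileName(encoded);
         System.out.println(encoded);               // URL-encoded form of the name
         System.out.println(decoded.equals(name));  // true: decoding restores the original
       }
     }
     </pre>
-->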
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.Keys -->
|
|
<class name="JobHistory.Keys" extends="java.lang.Enum<org.apache.hadoop.mapred.JobHistory.Keys>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.JobHistory.Keys[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.JobHistory.Keys"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Job history files contain key="value" pairs, where keys belong to this enum.
|
|
It acts as a global namespace for all keys.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.Keys -->
|
|
<!-- start interface org.apache.hadoop.mapred.JobHistory.Listener -->
|
|
<interface name="JobHistory.Listener" abstract="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="handle"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="recType" type="org.apache.hadoop.mapred.JobHistory.RecordTypes"/>
|
|
<param name="values" type="java.util.Map<org.apache.hadoop.mapred.JobHistory.Keys, java.lang.String>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Callback method for history parser.
|
|
@param recType type of record, which is the first entry in the line.
|
|
@param values a map of key-value pairs as they appear in history.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Callback interface for reading back log events from JobHistory. This interface
|
|
should be implemented and passed to JobHistory.parseHistoryFromFS()]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.JobHistory.Listener -->
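<!-- Illustrative sketch of the listener-based parsing described above, assuming args[0] is the
     path of a readable job-history file: a Listener that counts Job records while
     JobHistory.parseHistoryFromFS streams the file line by line, so the whole history never
     has to be held in memory.
     <pre>
     import java.io.IOException;
     import java.util.Map;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.FileSystem;
     import org.apache.hadoop.mapred.JobHistory;

     public class JobRecordCounter implements JobHistory.Listener {
       private int jobRecords = 0;

       public void handle(JobHistory.RecordTypes recType,
                          Map<JobHistory.Keys, String> values) throws IOException {
         // recType is the first token of the history line; values holds its key/value pairs.
         if (recType == JobHistory.RecordTypes.Job) {
           jobRecords++;
         }
       }

       public static void main(String[] args) throws IOException {
         FileSystem fs = FileSystem.get(new Configuration());
         JobRecordCounter counter = new JobRecordCounter();
         JobHistory.parseHistoryFromFS(args[0], counter, fs);  // args[0]: history file path
         System.out.println("Job records seen: " + counter.jobRecords);
       }
     }
     </pre>
-->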
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.MapAttempt -->
|
|
<class name="JobHistory.MapAttempt" extends="org.apache.hadoop.mapred.JobHistory.TaskAttempt"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobHistory.MapAttempt"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="logStarted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logStarted(TaskAttemptID, long, String, int, String)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="startTime" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log start time of this map task attempt.
|
|
@param taskAttemptId task attempt id
|
|
@param startTime start time of task attempt as reported by task tracker.
|
|
@param hostName host name of the task attempt.
|
|
@deprecated Use
|
|
{@link #logStarted(TaskAttemptID, long, String, int, String)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logStarted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="startTime" type="long"/>
|
|
<param name="trackerName" type="java.lang.String"/>
|
|
<param name="httpPort" type="int"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log start time of this map task attempt.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param startTime start time of task attempt as reported by task tracker.
|
|
@param trackerName name of the tracker executing the task attempt.
|
|
@param httpPort http port of the task tracker executing the task attempt
|
|
@param taskType Whether the attempt is cleanup or setup or map]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFinished"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logFinished(TaskAttemptID, long, String, String, String, Counters)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="finishTime" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log finish time of map task attempt.
|
|
@param taskAttemptId task attempt id
|
|
@param finishTime finish time
|
|
@param hostName host name
|
|
@deprecated Use
|
|
{@link #logFinished(TaskAttemptID, long, String, String, String, Counters)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFinished"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="finishTime" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<param name="stateString" type="java.lang.String"/>
|
|
<param name="counter" type="org.apache.hadoop.mapred.Counters"/>
|
|
<doc>
|
|
<![CDATA[Log finish time of map task attempt.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param finishTime finish time
|
|
@param hostName host name
|
|
@param taskType Whether the attempt is cleanup or setup or map
|
|
@param stateString state string of the task attempt
|
|
@param counter counters of the task attempt]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFailed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logFailed(TaskAttemptID, long, String, String, String)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log task attempt failed event.
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp timestamp
|
|
@param hostName hostname of this task attempt.
|
|
@param error error message if any for this task attempt.
|
|
@deprecated Use
|
|
{@link #logFailed(TaskAttemptID, long, String, String, String)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFailed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log task attempt failed event.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp timestamp
|
|
@param hostName hostname of this task attempt.
|
|
@param error error message if any for this task attempt.
|
|
@param taskType Whether the attempt is cleanup or setup or map]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logKilled"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logKilled(TaskAttemptID, long, String, String, String)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log task attempt killed event.
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp timestamp
|
|
@param hostName hostname of this task attempt.
|
|
@param error error message if any for this task attempt.
|
|
@deprecated Use
|
|
{@link #logKilled(TaskAttemptID, long, String, String, String)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logKilled"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log task attempt killed event.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp timestamp
|
|
@param hostName hostname of this task attempt.
|
|
@param error error message if any for this task attempt.
|
|
@param taskType Whether the attempt is cleanup or setup or map]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Helper class for logging or reading back events related to start, finish or failure of
|
|
a Map Attempt on a node.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.MapAttempt -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.RecordTypes -->
|
|
<class name="JobHistory.RecordTypes" extends="java.lang.Enum<org.apache.hadoop.mapred.JobHistory.RecordTypes>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.JobHistory.RecordTypes[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.JobHistory.RecordTypes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Record types are identifiers for each line of log in history files.
|
|
A record type appears as the first token in a single line of log.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.RecordTypes -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.ReduceAttempt -->
|
|
<class name="JobHistory.ReduceAttempt" extends="org.apache.hadoop.mapred.JobHistory.TaskAttempt"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobHistory.ReduceAttempt"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="logStarted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logStarted(TaskAttemptID, long, String, int, String)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="startTime" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log start time of Reduce task attempt.
|
|
@param taskAttemptId task attempt id
|
|
@param startTime start time
|
|
@param hostName host name
|
|
@deprecated Use
|
|
{@link #logStarted(TaskAttemptID, long, String, int, String)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logStarted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="startTime" type="long"/>
|
|
<param name="trackerName" type="java.lang.String"/>
|
|
<param name="httpPort" type="int"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log start time of Reduce task attempt.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param startTime start time
|
|
@param trackerName tracker name
|
|
@param httpPort the http port of the tracker executing the task attempt
|
|
@param taskType Whether the attempt is cleanup or setup or reduce]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFinished"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logFinished(TaskAttemptID, long, long, long, String, String, String, Counters)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="shuffleFinished" type="long"/>
|
|
<param name="sortFinished" type="long"/>
|
|
<param name="finishTime" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log finished event of this task.
|
|
@param taskAttemptId task attempt id
|
|
@param shuffleFinished shuffle finish time
|
|
@param sortFinished sort finish time
|
|
@param finishTime finish time of task
|
|
@param hostName host name where task attempt executed
|
|
@deprecated Use
|
|
{@link #logFinished(TaskAttemptID, long, long, long, String, String, String, Counters)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFinished"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="shuffleFinished" type="long"/>
|
|
<param name="sortFinished" type="long"/>
|
|
<param name="finishTime" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<param name="stateString" type="java.lang.String"/>
|
|
<param name="counter" type="org.apache.hadoop.mapred.Counters"/>
|
|
<doc>
|
|
<![CDATA[Log finished event of this task.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param shuffleFinished shuffle finish time
|
|
@param sortFinished sort finish time
|
|
@param finishTime finish time of task
|
|
@param hostName host name where task attempt executed
|
|
@param taskType Whether the attempt is cleanup or setup or reduce
|
|
@param stateString the state string of the attempt
|
|
@param counter counters of the attempt]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFailed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logFailed(TaskAttemptID, long, String, String, String)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log failed reduce task attempt.
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp time stamp when task failed
|
|
@param hostName host name of the task attempt.
|
|
@param error error message of the task.
|
|
@deprecated Use
|
|
{@link #logFailed(TaskAttemptID, long, String, String, String)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFailed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log failed reduce task attempt.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp time stamp when task failed
|
|
@param hostName host name of the task attempt.
|
|
@param error error message of the task.
|
|
@param taskType Whether the attempt is cleanup or setup or reduce]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logKilled"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use
|
|
{@link #logKilled(TaskAttemptID, long, String, String, String)}">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log killed reduce task attempt.
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp time stamp when task failed
|
|
@param hostName host name of the task attempt.
|
|
@param error error message of the task.
|
|
@deprecated Use
|
|
{@link #logKilled(TaskAttemptID, long, String, String, String)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logKilled"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskAttemptId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="timestamp" type="long"/>
|
|
<param name="hostName" type="java.lang.String"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log killed reduce task attempt.
|
|
|
|
@param taskAttemptId task attempt id
|
|
@param timestamp time stamp when task failed
|
|
@param hostName host name of the task attempt.
|
|
@param error error message of the task.
|
|
@param taskType Whether the attempt is cleanup or setup or reduce]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Helper class for logging or reading back events related to start, finish or failure of
|
|
a Reduce Attempt on a node.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.ReduceAttempt -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.Task -->
|
|
<class name="JobHistory.Task" extends="org.apache.hadoop.mapred.JobHistory.KeyValuePair"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobHistory.Task"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="logStarted"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskID"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<param name="startTime" type="long"/>
|
|
<param name="splitLocations" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log start time of task (TIP).
|
|
@param taskId task id
|
|
@param taskType MAP or REDUCE
|
|
@param startTime start time of the TIP.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFinished"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskID"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<param name="finishTime" type="long"/>
|
|
<param name="counters" type="org.apache.hadoop.mapred.Counters"/>
|
|
<doc>
|
|
<![CDATA[Log finish time of task.
|
|
@param taskId task id
|
|
@param taskType MAP or REDUCE
|
|
@param finishTime finish time of task in ms]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFailed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskID"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<param name="time" type="long"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Log task failed event.
|
|
@param taskId task id
|
|
@param taskType MAP or REDUCE.
|
|
@param time timestamp when the task failure was detected.
|
|
@param error error message for failure.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logFailed"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskID"/>
|
|
<param name="taskType" type="java.lang.String"/>
|
|
<param name="time" type="long"/>
|
|
<param name="error" type="java.lang.String"/>
|
|
<param name="failedDueToAttempt" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<doc>
|
|
<![CDATA[@param failedDueToAttempt The attempt that caused the failure, if any]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskAttempts" return="java.util.Map<java.lang.String, org.apache.hadoop.mapred.JobHistory.TaskAttempt>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns all task attempts for this task. <task attempt id - TaskAttempt>]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Helper class for logging or reading back events related to Task's start, finish or failure.
|
|
All events logged by this class are logged in a separate file per job in
|
|
job tracker history. These events map to TIPs in jobtracker.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.Task -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.TaskAttempt -->
|
|
<class name="JobHistory.TaskAttempt" extends="org.apache.hadoop.mapred.JobHistory.Task"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobHistory.TaskAttempt"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Base class for Map and Reduce TaskAttempts.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.TaskAttempt -->
|
|
<!-- start class org.apache.hadoop.mapred.JobHistory.Values -->
|
|
<class name="JobHistory.Values" extends="java.lang.Enum<org.apache.hadoop.mapred.JobHistory.Values>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.JobHistory.Values[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.JobHistory.Values"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This enum contains some of the values commonly used by history log events.
|
|
Since values in history can only be strings, Values.name() is used in
most places in the history file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobHistory.Values -->
|
|
<!-- start class org.apache.hadoop.mapred.JobID -->
|
|
<class name="JobID" extends="org.apache.hadoop.mapred.ID"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobID" type="java.lang.String, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a JobID object
|
|
@param jtIdentifier jobTracker identifier
|
|
@param id job number]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getJtIdentifier" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="org.apache.hadoop.mapred.ID"/>
|
|
<doc>
|
|
<![CDATA[Compare JobIDs first by jtIdentifier, then by job number.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="forName" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<exception name="IllegalArgumentException" type="java.lang.IllegalArgumentException"/>
|
|
<doc>
|
|
<![CDATA[Construct a JobId object from given string
|
|
@return constructed JobId object or null if the given String is null
|
|
@throws IllegalArgumentException if the given string is malformed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobIDsPattern" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jtIdentifier" type="java.lang.String"/>
|
|
<param name="jobId" type="java.lang.Integer"/>
|
|
<doc>
|
|
<![CDATA[Returns a regex pattern which matches job IDs. Arguments can
|
|
be given null, in which case that part of the regex will be generic.
|
|
For example to obtain a regex matching <i>any job</i>
|
|
run on the jobtracker started at <i>200707121733</i>, we would use :
|
|
<pre>
|
|
JobID.getJobIDsPattern("200707121733", null);
|
|
</pre>
|
|
which will return :
|
|
<pre> "job_200707121733_[0-9]*" </pre>
|
|
@param jtIdentifier jobTracker identifier, or null
|
|
@param jobId job number, or null
|
|
@return a regex pattern matching JobIDs]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[JobID represents the immutable and unique identifier for
|
|
the job. JobID consists of two parts. The first part
represents the jobtracker identifier, so that the jobID-to-jobtracker mapping
is defined. For a cluster setup this string is the jobtracker
start time; for the local setting, it is "local".
|
|
Second part of the JobID is the job number. <br>
|
|
An example JobID is :
|
|
<code>job_200707121733_0003</code> , which represents the third job
|
|
running at the jobtracker started at <code>200707121733</code>.
|
|
<p>
|
|
Applications should never construct or parse JobID strings, but rather
|
|
use appropriate constructors or {@link #forName(String)} method.
|
|
|
|
@see TaskID
|
|
@see TaskAttemptID
|
|
@see JobTracker#getNewJobId()
|
|
@see JobTracker#getStartTime()]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobID -->
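<!-- Illustrative sketch of parsing and matching JobIDs with the methods above. The identifier
     and job number reuse the example from the class description; getJobIDsPattern is given a
     null job number so that part of the regex stays generic.
     <pre>
     import java.util.regex.Pattern;
     import org.apache.hadoop.mapred.JobID;

     public class JobIdParsing {
       public static void main(String[] args) {
         JobID id = JobID.forName("job_200707121733_0003");
         System.out.println(id.getJtIdentifier());   // 200707121733
         System.out.println(id.toString());          // job_200707121733_0003

         // Regex matching any job of that jobtracker instance (job number left generic).
         Pattern anyJob = Pattern.compile(JobID.getJobIDsPattern("200707121733", null));
         System.out.println(anyJob.matcher(id.toString()).matches());  // true
       }
     }
     </pre>
-->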
|
|
<!-- start class org.apache.hadoop.mapred.JobPriority -->
|
|
<class name="JobPriority" extends="java.lang.Enum<org.apache.hadoop.mapred.JobPriority>"
|
|
abstract="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.JobPriority[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.JobPriority"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Used to describe the priority of the running job.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobPriority -->
|
|
<!-- start class org.apache.hadoop.mapred.JobProfile -->
|
|
<class name="JobProfile" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="JobProfile"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct an empty {@link JobProfile}.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobProfile" type="java.lang.String, org.apache.hadoop.mapred.JobID, java.lang.String, java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a {@link JobProfile} from the userid, jobid,
|
|
job config-file, job-details url and job name.
|
|
|
|
@param user userid of the person who submitted the job.
|
|
@param jobid id of the job.
|
|
@param jobFile job configuration file.
|
|
@param url link to the web-ui for details of the job.
|
|
@param name user-specified job name.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobProfile" type="java.lang.String, org.apache.hadoop.mapred.JobID, java.lang.String, java.lang.String, java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a {@link JobProfile} from the userid, jobid,
|
|
job config-file, job-details url and job name.
|
|
|
|
@param user userid of the person who submitted the job.
|
|
@param jobid id of the job.
|
|
@param jobFile job configuration file.
|
|
@param url link to the web-ui for details of the job.
|
|
@param name user-specified job name.
|
|
@param queueName name of the queue to which the job is submitted]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobProfile" type="java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use JobProfile(String, JobID, String, String, String) instead">
|
|
<doc>
|
|
<![CDATA[@deprecated use JobProfile(String, JobID, String, String, String) instead]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getUser" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the user id.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobID" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the job id.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobId" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use getJobID() instead">
|
|
<doc>
|
|
<![CDATA[@deprecated use getJobID() instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobFile" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the configuration file for the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getURL" return="java.net.URL"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the link to the web-ui for details of the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the user-specified job name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getQueueName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the name of the queue to which the job is submitted.
|
|
@return name of the queue.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A JobProfile is a MapReduce primitive. Tracks a job,
|
|
whether living or dead.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobProfile -->
|
|
<!-- start class org.apache.hadoop.mapred.JobQueueInfo -->
|
|
<class name="JobQueueInfo" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="JobQueueInfo"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default constructor for Job Queue Info.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobQueueInfo" type="java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a new JobQueueInfo object using the queue name and the
|
|
scheduling information passed.
|
|
|
|
@param queueName Name of the job queue
|
|
@param schedulingInfo Scheduling Information associated with the job
|
|
queue]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setQueueName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="queueName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the queue name of the JobQueueInfo
|
|
|
|
@param queueName Name of the job queue.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getQueueName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the queue name from JobQueueInfo
|
|
|
|
@return queue name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSchedulingInfo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="schedulingInfo" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the scheduling information associated with a particular job queue
|
|
|
|
@param schedulingInfo Scheduling information to associate with the queue]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSchedulingInfo" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Gets the scheduling information associated with a particular job queue.
|
|
If nothing is set, <b>"N/A"</b> is returned.
|
|
|
|
@return Scheduling information associated with the particular job queue]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Class that contains the information regarding the Job Queues which are
|
|
maintained by the Hadoop Map/Reduce framework.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobQueueInfo -->
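<!-- Illustrative usage sketch, not part of the generated JDiff output: listing the queue
     information carried by JobQueueInfo. The accessors used are the ones listed above;
     the client-side entry point JobClient.getQueues() is an assumption about this release.

     import org.apache.hadoop.mapred.JobClient;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.JobQueueInfo;

     public class ListQueues {
       public static void main(String[] args) throws Exception {
         JobClient client = new JobClient(new JobConf());
         // Each JobQueueInfo carries the queue name plus its scheduling information.
         for (JobQueueInfo queue : client.getQueues()) {
           System.out.println(queue.getQueueName() + " : " + queue.getSchedulingInfo());
         }
       }
     }
-->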
|
|
<!-- start class org.apache.hadoop.mapred.JobShell -->
|
|
<class name="JobShell" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="JobShell"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="JobShell" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="init"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[run method from Tool]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Provides command line parsing for job submission.
|
|
A job submission looks like:
|
|
hadoop jar -libjars <comma separated jars> -archives <comma separated archives>
|
|
-files <comma separated files> inputjar args]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobShell -->
|
|
<!-- start class org.apache.hadoop.mapred.JobStatus -->
|
|
<class name="JobStatus" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="java.lang.Cloneable"/>
|
|
<constructor name="JobStatus"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="JobStatus" type="org.apache.hadoop.mapred.JobID, float, float, float, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a job status object for a given jobid.
|
|
@param jobid The jobid of the job
|
|
@param mapProgress The progress made on the maps
|
|
@param reduceProgress The progress made on the reduces
|
|
@param cleanupProgress The progress made on cleanup
|
|
@param runState The current state of the job]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobStatus" type="org.apache.hadoop.mapred.JobID, float, float, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a job status object for a given jobid.
|
|
@param jobid The jobid of the job
|
|
@param mapProgress The progress made on the maps
|
|
@param reduceProgress The progress made on the reduces
|
|
@param runState The current state of the job]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobStatus" type="org.apache.hadoop.mapred.JobID, float, float, float, int, org.apache.hadoop.mapred.JobPriority"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a job status object for a given jobid.
|
|
@param jobid The jobid of the job
|
|
@param mapProgress The progress made on the maps
|
|
@param reduceProgress The progress made on the reduces
|
|
@param runState The current state of the job
|
|
@param jp Priority of the job.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="JobStatus" type="org.apache.hadoop.mapred.JobID, float, float, float, float, int, org.apache.hadoop.mapred.JobPriority"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a job status object for a given jobid.
|
|
@param jobid The jobid of the job
|
|
@param setupProgress The progress made on the setup
|
|
@param mapProgress The progress made on the maps
|
|
@param reduceProgress The progress made on the reduces
|
|
@param cleanupProgress The progress made on the cleanup
|
|
@param runState The current state of the job
|
|
@param jp Priority of the job.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getJobId" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use getJobID instead">
|
|
<doc>
|
|
<![CDATA[@deprecated use getJobID instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobID" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return The jobid of the Job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mapProgress" return="float"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return Percentage of progress in maps]]>
|
|
</doc>
|
|
</method>
|
|
<method name="cleanupProgress" return="float"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return Percentage of progress in cleanup]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setupProgress" return="float"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return Percentage of progress in setup]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reduceProgress" return="float"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return Percentage of progress in reduce]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRunState" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return running state of the job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setRunState"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="state" type="int"/>
|
|
<doc>
|
|
<![CDATA[Change the current run state of the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStartTime" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return start time of the job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="clone" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getUsername" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the username of the job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSchedulingInfo" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Gets the Scheduling information associated to a particular Job.
|
|
@return the scheduling information of the job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSchedulingInfo"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="schedulingInfo" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Used to set the scheduling information associated to a particular Job.
|
|
|
|
@param schedulingInfo Scheduling information of the job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobPriority" return="org.apache.hadoop.mapred.JobPriority"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the priority of the job
|
|
@return job priority]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobPriority"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jp" type="org.apache.hadoop.mapred.JobPriority"/>
|
|
<doc>
|
|
<![CDATA[Set the priority of the job, defaulting to NORMAL.
|
|
@param jp new job priority]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="RUNNING" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="SUCCEEDED" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="FAILED" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="PREP" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="KILLED" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Describes the current status of a job. This is
|
|
not intended to be a comprehensive piece of data.
|
|
For that, look at JobProfile.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobStatus -->
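<!-- Illustrative usage sketch, not part of the generated JDiff output: polling the run state
     and progress reported through JobStatus. The JobStatus accessors and the RUNNING constant
     are listed above; the client-side entry point JobClient.getAllJobs() is an assumption
     about this release.

     import org.apache.hadoop.mapred.JobClient;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.JobStatus;

     public class ShowRunningJobs {
       public static void main(String[] args) throws Exception {
         JobClient client = new JobClient(new JobConf());
         for (JobStatus status : client.getAllJobs()) {
           if (status.getRunState() == JobStatus.RUNNING) {
             System.out.println(status.getJobID() + " map=" + status.mapProgress()
                 + " reduce=" + status.reduceProgress());
           }
         }
       }
     }
-->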
|
|
<!-- start class org.apache.hadoop.mapred.JobTracker -->
|
|
<class name="JobTracker" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.MRConstants"/>
|
|
<implements name="org.apache.hadoop.mapred.InterTrackerProtocol"/>
|
|
<implements name="org.apache.hadoop.mapred.JobSubmissionProtocol"/>
|
|
<implements name="org.apache.hadoop.mapred.TaskTrackerManager"/>
|
|
<method name="startTracker" return="org.apache.hadoop.mapred.JobTracker"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<doc>
|
|
<![CDATA[Start the JobTracker with given configuration.
|
|
|
|
The conf will be modified to reflect the actual ports on which
|
|
the JobTracker is up and running if the user passes the port as
|
|
<code>zero</code>.
|
|
|
|
@param conf configuration for the JobTracker.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stopTracker"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getProtocolVersion" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.String"/>
|
|
<param name="clientVersion" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="hasRestarted" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Whether the JT has restarted]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hasRecovered" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Whether the JT has recovered upon restart]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecoveryDuration" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[How long the jobtracker took to recover from restart.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInstrumentationClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.JobTrackerInstrumentation>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="setInstrumentationClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="t" type="java.lang.Class<? extends org.apache.hadoop.mapred.JobTrackerInstrumentation>"/>
|
|
</method>
|
|
<method name="getAddress" return="java.net.InetSocketAddress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="offerService"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Run forever]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTotalSubmissions" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getJobTrackerMachine" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getTrackerIdentifier" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the unique identifier (i.e. timestamp) of this job tracker start.
|
|
@return a string with a unique identifier]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTrackerPort" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getInfoPort" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getStartTime" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="runningJobs" return="java.util.Vector<org.apache.hadoop.mapred.JobInProgress>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getRunningJobs" return="java.util.List<org.apache.hadoop.mapred.JobInProgress>"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Version that is called from a timer thread, and therefore must
|
|
synchronize carefully.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="failedJobs" return="java.util.Vector<org.apache.hadoop.mapred.JobInProgress>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="completedJobs" return="java.util.Vector<org.apache.hadoop.mapred.JobInProgress>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="taskTrackers" return="java.util.Collection<org.apache.hadoop.mapred.TaskTrackerStatus>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getTaskTracker" return="org.apache.hadoop.mapred.TaskTrackerStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="trackerID" type="java.lang.String"/>
|
|
</method>
|
|
<method name="resolveAndAddToTopology" return="org.apache.hadoop.net.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<method name="getNodesAtMaxLevel" return="java.util.Collection<org.apache.hadoop.net.Node>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a collection of nodes at the max level]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getParentNode" return="org.apache.hadoop.net.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="node" type="org.apache.hadoop.net.Node"/>
|
|
<param name="level" type="int"/>
|
|
</method>
|
|
<method name="getNode" return="org.apache.hadoop.net.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Return the Node in the network topology that corresponds to the hostname]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumTaskCacheLevels" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getNumResolvedTaskTrackers" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getNumberOfUniqueHosts" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="addJobInProgressListener"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="listener" type="org.apache.hadoop.mapred.JobInProgressListener"/>
|
|
</method>
|
|
<method name="removeJobInProgressListener"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="listener" type="org.apache.hadoop.mapred.JobInProgressListener"/>
|
|
</method>
|
|
<method name="getQueueManager" return="org.apache.hadoop.mapred.QueueManager"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the {@link QueueManager} associated with the JobTracker.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBuildVersion" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="heartbeat" return="org.apache.hadoop.mapred.HeartbeatResponse"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="status" type="org.apache.hadoop.mapred.TaskTrackerStatus"/>
|
|
<param name="initialContact" type="boolean"/>
|
|
<param name="acceptNewTasks" type="boolean"/>
|
|
<param name="responseId" type="short"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The periodic heartbeat mechanism between the {@link TaskTracker} and
|
|
the {@link JobTracker}.
|
|
|
|
The {@link JobTracker} processes the status information sent by the
|
|
{@link TaskTracker} and responds with instructions to start/stop
|
|
tasks or jobs, and also 'reset' instructions during contingencies.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNextHeartbeatInterval" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Calculates next heartbeat interval using cluster size.
|
|
Heartbeat interval is incremented by 1 second for every 50 nodes.
|
|
@return next heartbeat interval.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFilesystemName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Grab the local fs name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reportTaskTrackerError"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskTracker" type="java.lang.String"/>
|
|
<param name="errorClass" type="java.lang.String"/>
|
|
<param name="errorMessage" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getNewJobId" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Allocates a new JobId string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="submitJob" return="org.apache.hadoop.mapred.JobStatus"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[JobTracker.submitJob() kicks off a new job.
|
|
|
|
Create a 'JobInProgress' object, which contains both JobProfile
|
|
and JobStatus. Those two sub-objects are sometimes shipped outside
|
|
of the JobTracker. But JobInProgress adds info that's useful for
|
|
the JobTracker alone.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClusterStatus" return="org.apache.hadoop.mapred.ClusterStatus"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="killJob"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setJobPriority"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="priority" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set the priority of a job
|
|
@param jobid id of the job
|
|
@param priority new priority of the job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobProfile" return="org.apache.hadoop.mapred.JobProfile"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getJobStatus" return="org.apache.hadoop.mapred.JobStatus"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getJobCounters" return="org.apache.hadoop.mapred.Counters"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getMapTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getReduceTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getCleanupTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getSetupTaskReports" return="org.apache.hadoop.mapred.TaskReport[]"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getTaskCompletionEvents" return="org.apache.hadoop.mapred.TaskCompletionEvent[]"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="fromEventId" type="int"/>
|
|
<param name="maxEvents" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getTaskDiagnostics" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the diagnostics for a given task
|
|
@param taskId the id of the task
|
|
@return an array of the diagnostic messages]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTip" return="org.apache.hadoop.mapred.TaskInProgress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tipid" type="org.apache.hadoop.mapred.TaskID"/>
|
|
<doc>
|
|
<![CDATA[Returns specified TaskInProgress, or null.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="killTask" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="shouldFail" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark a Task to be killed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAssignedTracker" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<doc>
|
|
<![CDATA[Get tracker name for a given task id.
|
|
@param taskId the id of the task
|
|
@return The name of the task tracker]]>
|
|
</doc>
|
|
</method>
|
|
<method name="jobsToComplete" return="org.apache.hadoop.mapred.JobStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getAllJobs" return="org.apache.hadoop.mapred.JobStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getSystemDir" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@see org.apache.hadoop.mapred.JobSubmissionProtocol#getSystemDir()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJob" return="org.apache.hadoop.mapred.JobInProgress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobid" type="org.apache.hadoop.mapred.JobID"/>
|
|
</method>
|
|
<method name="getLocalJobFilePath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<doc>
|
|
<![CDATA[Get the localized job file path on the job tracker's local file system
|
|
@param jobId id of the job
|
|
@return the path of the job conf file on the local file system]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="InterruptedException" type="java.lang.InterruptedException"/>
|
|
<doc>
|
|
<![CDATA[Start the JobTracker process. This is used only for debugging. As a rule,
|
|
JobTracker should be run as part of the DFS Namenode process.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getQueues" return="org.apache.hadoop.mapred.JobQueueInfo[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getQueueInfo" return="org.apache.hadoop.mapred.JobQueueInfo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="queue" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getJobsFromQueue" return="org.apache.hadoop.mapred.JobStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="queue" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[JobTracker is the central location for submitting and
|
|
tracking MR jobs in a network environment.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobTracker -->
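<!-- Illustrative usage sketch, not part of the generated JDiff output: starting an embedded
     JobTracker for debugging, using only startTracker and offerService as listed above. Binding
     to port 0 lets the tracker pick free ports, as the startTracker documentation describes;
     the two configuration keys shown are assumptions about the standard tracker address settings.

     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.JobTracker;

     public class EmbeddedTracker {
       public static void main(String[] args) throws Exception {
         JobConf conf = new JobConf();
         // With port 0 the conf is rewritten to the ports actually bound.
         conf.set("mapred.job.tracker", "localhost:0");
         conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
         JobTracker tracker = JobTracker.startTracker(conf);
         tracker.offerService();   // blocks and runs the tracker "forever"
       }
     }
-->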
|
|
<!-- start class org.apache.hadoop.mapred.JobTracker.IllegalStateException -->
|
|
<class name="JobTracker.IllegalStateException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JobTracker.IllegalStateException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[A client tried to submit a job before the Job Tracker was ready.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobTracker.IllegalStateException -->
|
|
<!-- start class org.apache.hadoop.mapred.JobTracker.State -->
|
|
<class name="JobTracker.State" extends="java.lang.Enum<org.apache.hadoop.mapred.JobTracker.State>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.JobTracker.State[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.JobTracker.State"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.JobTracker.State -->
|
|
<!-- start class org.apache.hadoop.mapred.KeyValueLineRecordReader -->
|
|
<class name="KeyValueLineRecordReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<constructor name="KeyValueLineRecordReader" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapred.FileSplit"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="getKeyClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createKey" return="org.apache.hadoop.io.Text"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createValue" return="org.apache.hadoop.io.Text"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="findSeparator" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="utf" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<param name="sep" type="byte"/>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Text"/>
|
|
<param name="value" type="org.apache.hadoop.io.Text"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read key/value pair in a line.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class treats a line in the input as a key/value pair separated by a
|
|
separator character. The separator can be specified in the config file
|
|
under the attribute name key.value.separator.in.input.line. The default
|
|
separator is the tab character ('\t').]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.KeyValueLineRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.KeyValueTextInputFormat -->
|
|
<class name="KeyValueTextInputFormat" extends="org.apache.hadoop.mapred.FileInputFormat<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<constructor name="KeyValueTextInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="isSplitable" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="genericSplit" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link InputFormat} for plain text files. Files are broken into lines.
|
|
Either linefeed or carriage-return is used to signal end of line. Each line
|
|
is divided into key and value parts by a separator byte. If no such byte
|
|
exists, the key will be the entire line and value will be empty.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.KeyValueTextInputFormat -->
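<!-- Illustrative usage sketch, not part of the generated JDiff output: selecting
     KeyValueTextInputFormat for a job and overriding the separator property named in the
     KeyValueLineRecordReader documentation above. JobConf.setInputFormat is assumed to be the
     standard way to choose the input format in this API.

     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.KeyValueTextInputFormat;

     public class KeyValueJobSetup {
       public static JobConf configure() {
         JobConf conf = new JobConf();
         conf.setInputFormat(KeyValueTextInputFormat.class);
         // Split each line on the first ':' instead of the default tab character.
         conf.set("key.value.separator.in.input.line", ":");
         return conf;
       }
     }
-->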
|
|
<!-- start class org.apache.hadoop.mapred.LineRecordReader -->
|
|
<class name="LineRecordReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.LongWritable, org.apache.hadoop.io.Text>"/>
|
|
<constructor name="LineRecordReader" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapred.FileSplit"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<constructor name="LineRecordReader" type="java.io.InputStream, long, long, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="LineRecordReader" type="java.io.InputStream, long, long, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="createKey" return="org.apache.hadoop.io.LongWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createValue" return="org.apache.hadoop.io.Text"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.LongWritable"/>
|
|
<param name="value" type="org.apache.hadoop.io.Text"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a line.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the progress within the split]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Treats keys as offset in file and value as line.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.LineRecordReader -->
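<!-- Illustrative usage sketch, not part of the generated JDiff output: reading a whole file as
     one split with LineRecordReader, using only the constructor and methods listed above. The
     FileSplit constructor signature (path, start, length, hosts) is an assumption.

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.io.LongWritable;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.FileSplit;
     import org.apache.hadoop.mapred.LineRecordReader;

     public class DumpSplit {
       public static void main(String[] args) throws Exception {
         Configuration conf = new Configuration();
         FileSplit split = new FileSplit(new Path(args[0]), 0, Long.MAX_VALUE, (String[]) null);
         LineRecordReader reader = new LineRecordReader(conf, split);
         LongWritable key = reader.createKey();   // byte offset within the file
         Text value = reader.createValue();       // the line itself
         while (reader.next(key, value)) {
           System.out.println(key.get() + "\t" + value);
         }
         reader.close();
       }
     }
-->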
|
|
<!-- start class org.apache.hadoop.mapred.LineRecordReader.LineReader -->
|
|
<class name="LineRecordReader.LineReader" extends="org.apache.hadoop.util.LineReader"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use {@link org.apache.hadoop.util.LineReader} instead.">
|
|
<constructor name="LineRecordReader.LineReader" type="java.io.InputStream, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[A class that provides a line reader from an input stream.
|
|
@deprecated Use {@link org.apache.hadoop.util.LineReader} instead.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.LineRecordReader.LineReader -->
|
|
<!-- start class org.apache.hadoop.mapred.MapFileOutputFormat -->
|
|
<class name="MapFileOutputFormat" extends="org.apache.hadoop.mapred.FileOutputFormat<org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MapFileOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getReaders" return="org.apache.hadoop.io.MapFile.Reader[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Open the output generated by this format.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getEntry" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="readers" type="org.apache.hadoop.io.MapFile.Reader[]"/>
|
|
<param name="partitioner" type="org.apache.hadoop.mapred.Partitioner<K, V>"/>
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="V extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get an entry from output generated by this class.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link OutputFormat} that writes {@link MapFile}s.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.MapFileOutputFormat -->
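<!-- Illustrative usage sketch, not part of the generated JDiff output: looking up one key in a
     job's MapFile output with the getReaders and getEntry helpers listed above. HashPartitioner
     is assumed to match the partitioner used by the writing job, and the Text key/value types
     are placeholders for whatever the job actually emitted.

     import org.apache.hadoop.fs.FileSystem;
     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.io.MapFile;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.MapFileOutputFormat;
     import org.apache.hadoop.mapred.lib.HashPartitioner;

     public class MapFileLookup {
       public static void main(String[] args) throws Exception {
         JobConf conf = new JobConf();
         Path outputDir = new Path(args[0]);
         MapFile.Reader[] readers =
             MapFileOutputFormat.getReaders(FileSystem.get(conf), outputDir, conf);
         Text key = new Text(args[1]);
         Text value = new Text();
         // getEntry picks the right reader via the partitioner and fills in the value.
         MapFileOutputFormat.getEntry(readers, new HashPartitioner<Text, Text>(), key, value);
         System.out.println(value);
       }
     }
-->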
|
|
<!-- start interface org.apache.hadoop.mapred.Mapper -->
|
|
<interface name="Mapper" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<implements name="org.apache.hadoop.io.Closeable"/>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K1"/>
|
|
<param name="value" type="V1"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K2, V2>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Maps a single input key/value pair into an intermediate key/value pair.
|
|
|
|
<p>Output pairs need not be of the same types as input pairs. A given
|
|
input pair may map to zero or many output pairs. Output pairs are
|
|
collected with calls to
|
|
{@link OutputCollector#collect(Object,Object)}.</p>
|
|
|
|
<p>Applications can use the {@link Reporter} provided to report progress
|
|
or just indicate that they are alive. In scenarios where the application
|
|
takes an insignificant amount of time to process individual key/value
|
|
pairs, this is crucial since the framework might assume that the task has
|
|
timed-out and kill that task. The other way of avoiding this is to set
|
|
<a href="{@docRoot}/../hadoop-default.html#mapred.task.timeout">
|
|
mapred.task.timeout</a> to a high-enough value (or even zero for no
|
|
time-outs).</p>
|
|
|
|
@param key the input key.
|
|
@param value the input value.
|
|
@param output collects mapped keys and values.
|
|
@param reporter facility to report progress.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Maps input key/value pairs to a set of intermediate key/value pairs.
|
|
|
|
<p>Maps are the individual tasks which transform input records into
|
|
intermediate records. The transformed intermediate records need not be of
|
|
the same type as the input records. A given input pair may map to zero or
|
|
many output pairs.</p>
|
|
|
|
<p>The Hadoop Map-Reduce framework spawns one map task for each
|
|
{@link InputSplit} generated by the {@link InputFormat} for the job.
|
|
<code>Mapper</code> implementations can access the {@link JobConf} for the
|
|
job via the {@link JobConfigurable#configure(JobConf)} and initialize
|
|
themselves. Similarly they can use the {@link Closeable#close()} method for
|
|
de-initialization.</p>
|
|
|
|
<p>The framework then calls
|
|
{@link #map(Object, Object, OutputCollector, Reporter)}
|
|
for each key/value pair in the <code>InputSplit</code> for that task.</p>
|
|
|
|
<p>All intermediate values associated with a given output key are
|
|
subsequently grouped by the framework, and passed to a {@link Reducer} to
|
|
determine the final output. Users can control the grouping by specifying
|
|
a <code>Comparator</code> via
|
|
{@link JobConf#setOutputKeyComparatorClass(Class)}.</p>
|
|
|
|
<p>The grouped <code>Mapper</code> outputs are partitioned per
|
|
<code>Reducer</code>. Users can control which keys (and hence records) go to
|
|
which <code>Reducer</code> by implementing a custom {@link Partitioner}.
|
|
|
|
<p>Users can optionally specify a <code>combiner</code>, via
|
|
{@link JobConf#setCombinerClass(Class)}, to perform local aggregation of the
|
|
intermediate outputs, which helps to cut down the amount of data transferred
|
|
from the <code>Mapper</code> to the <code>Reducer</code>.
|
|
|
|
<p>The intermediate, grouped outputs are always stored in
|
|
{@link SequenceFile}s. Applications can specify if and how the intermediate
|
|
outputs are to be compressed and which {@link CompressionCodec}s are to be
|
|
used via the <code>JobConf</code>.</p>
|
|
|
|
<p>If the job has
|
|
<a href="{@docRoot}/org/apache/hadoop/mapred/JobConf.html#ReducerNone">zero
|
|
reduces</a> then the output of the <code>Mapper</code> is directly written
|
|
to the {@link FileSystem} without grouping by keys.</p>
|
|
|
|
<p>Example:</p>
|
|
<p><blockquote><pre>
|
|
public class MyMapper<K extends WritableComparable, V extends Writable>
|
|
extends MapReduceBase implements Mapper<K, V, K, V> {
|
|
|
|
static enum MyCounters { NUM_RECORDS }
|
|
|
|
private String mapTaskId;
|
|
private String inputFile;
|
|
private int noRecords = 0;
|
|
|
|
public void configure(JobConf job) {
|
|
mapTaskId = job.get("mapred.task.id");
|
|
inputFile = job.get("mapred.input.file");
|
|
}
|
|
|
|
public void map(K key, V val,
|
|
OutputCollector<K, V> output, Reporter reporter)
|
|
throws IOException {
|
|
// Process the <key, value> pair (assume this takes a while)
|
|
// ...
|
|
// ...
|
|
|
|
// Let the framework know that we are alive, and kicking!
|
|
// reporter.progress();
|
|
|
|
// Process some more
|
|
// ...
|
|
// ...
|
|
|
|
// Increment the no. of <key, value> pairs processed
|
|
++noRecords;
|
|
|
|
// Increment counters
|
|
reporter.incrCounter(NUM_RECORDS, 1);
|
|
|
|
// Every 100 records update application-level status
|
|
if ((noRecords%100) == 0) {
|
|
reporter.setStatus(mapTaskId + " processed " + noRecords +
|
|
" from input-file: " + inputFile);
|
|
}
|
|
|
|
// Output the result
|
|
output.collect(key, val);
|
|
}
|
|
}
|
|
</pre></blockquote></p>
|
|
|
|
<p>Applications may write a custom {@link MapRunnable} to exert greater
|
|
control over map processing, e.g. multi-threaded <code>Mapper</code>s, etc.</p>
|
|
|
|
@see JobConf
|
|
@see InputFormat
|
|
@see Partitioner
|
|
@see Reducer
|
|
@see MapReduceBase
|
|
@see MapRunnable
|
|
@see SequenceFile]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.Mapper -->
|
|
<!-- start class org.apache.hadoop.mapred.MapReduceBase -->
|
|
<class name="MapReduceBase" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Closeable"/>
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<constructor name="MapReduceBase"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Default implementation that does nothing.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Default implementation that does nothing.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Base class for {@link Mapper} and {@link Reducer} implementations.
|
|
|
|
<p>Provides default no-op implementations for a few methods; most non-trivial
|
|
applications need to override some of them.</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.MapReduceBase -->
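<!-- Illustrative usage sketch, not part of the generated JDiff output: extending MapReduceBase
     so only the callbacks that matter are overridden; close() keeps its no-op default. The
     mapper logic and the "example.tag" configuration key are made up for this sketch.

     import java.io.IOException;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.MapReduceBase;
     import org.apache.hadoop.mapred.Mapper;
     import org.apache.hadoop.mapred.OutputCollector;
     import org.apache.hadoop.mapred.Reporter;

     public class TaggingMapper extends MapReduceBase
         implements Mapper<Text, Text, Text, Text> {

       private String tag = "untagged";

       public void configure(JobConf job) {
         // read a job-level setting; "example.tag" is a hypothetical key
         tag = job.get("example.tag", tag);
       }

       public void map(Text key, Text value, OutputCollector<Text, Text> output,
                       Reporter reporter) throws IOException {
         output.collect(key, new Text(tag + ":" + value.toString()));
       }
     }
-->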
|
|
<!-- start interface org.apache.hadoop.mapred.MapRunnable -->
|
|
<interface name="MapRunnable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="org.apache.hadoop.mapred.RecordReader<K1, V1>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K2, V2>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Start mapping input <tt><key, value></tt> pairs.
|
|
|
|
<p>Mapping of input records to output records is complete when this method
|
|
returns.</p>
|
|
|
|
@param input the {@link RecordReader} to read the input records.
|
|
@param output the {@link OutputCollector} to collect the output records.
|
|
@param reporter {@link Reporter} to report progress, status-updates etc.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Expert: Generic interface for {@link Mapper}s.
|
|
|
|
<p>Custom implementations of <code>MapRunnable</code> can exert greater
|
|
control over map processing, e.g. multi-threaded, asynchronous mappers, etc.</p>
|
|
|
|
@see Mapper]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.MapRunnable -->
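<!-- Illustrative usage sketch, not part of the generated JDiff output: a minimal MapRunnable
     that implements the run(RecordReader, OutputCollector, Reporter) contract listed above and
     simply copies records through; a real implementation would add the batching or threading
     that motivates writing a custom MapRunnable. The LongWritable/Text types are placeholders.

     import java.io.IOException;
     import org.apache.hadoop.io.LongWritable;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.MapRunnable;
     import org.apache.hadoop.mapred.OutputCollector;
     import org.apache.hadoop.mapred.RecordReader;
     import org.apache.hadoop.mapred.Reporter;

     public class PassThroughRunner
         implements MapRunnable<LongWritable, Text, LongWritable, Text> {

       public void configure(JobConf job) {
         // no per-job state needed for this sketch
       }

       public void run(RecordReader<LongWritable, Text> input,
                       OutputCollector<LongWritable, Text> output,
                       Reporter reporter) throws IOException {
         LongWritable key = input.createKey();
         Text value = input.createValue();
         while (input.next(key, value)) {
           output.collect(key, value);   // identity "map"; report progress via reporter if slow
         }
       }
     }
-->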
|
|
<!-- start class org.apache.hadoop.mapred.MapRunner -->
|
|
<class name="MapRunner" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.MapRunnable<K1, V1, K2, V2>"/>
|
|
<constructor name="MapRunner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="org.apache.hadoop.mapred.RecordReader<K1, V1>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K2, V2>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getMapper" return="org.apache.hadoop.mapred.Mapper<K1, V1, K2, V2>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Default {@link MapRunnable} implementation.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.MapRunner -->
|
|
<!-- start class org.apache.hadoop.mapred.MultiFileInputFormat -->
|
|
<class name="MultiFileInputFormat" extends="org.apache.hadoop.mapred.FileInputFormat<K, V>"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MultiFileInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getSplits" return="org.apache.hadoop.mapred.InputSplit[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="numSplits" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<K, V>"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An abstract {@link InputFormat} that returns {@link MultiFileSplit}'s
|
|
in {@link #getSplits(JobConf, int)} method. Splits are constructed from
|
|
the files under the input paths. Each split returned contains <i>nearly</i>
|
|
equal content length. <br>
|
|
Subclasses implement {@link #getRecordReader(InputSplit, JobConf, Reporter)}
|
|
to construct <code>RecordReader</code>'s for <code>MultiFileSplit</code>'s.
|
|
@see MultiFileSplit]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.MultiFileInputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.MultiFileSplit -->
|
|
<class name="MultiFileSplit" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputSplit"/>
|
|
<constructor name="MultiFileSplit" type="org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path[], long[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getLengths" return="long[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns an array containing the lengths of the files in
|
|
the split]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<doc>
|
|
<![CDATA[Returns the length of the i<sup>th</sup> Path]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumPaths" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the number of Paths in the split]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<doc>
|
|
<![CDATA[Returns the i<sup>th</sup> Path]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPaths" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns all the Paths in the split]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocations" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A sub-collection of input files. Unlike {@link FileSplit}, MultiFileSplit
|
|
class does not represent a split of a file, but a split of input files
|
|
into smaller sets. The atomic unit of split is a file. <br>
|
|
MultiFileSplit can be used to implement {@link RecordReader}'s, for example
|
|
reading one record per file.
|
|
@see FileSplit
|
|
@see MultiFileInputFormat]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.MultiFileSplit -->
|
|
<!-- start interface org.apache.hadoop.mapred.OutputCollector -->
|
|
<interface name="OutputCollector" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="collect"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Adds a key/value pair to the output.
|
|
|
|
@param key the key to collect.
|
|
@param value the value to collect.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Collects the <code><key, value></code> pairs output by {@link Mapper}s
|
|
and {@link Reducer}s.
|
|
|
|
<p><code>OutputCollector</code> is the generalization of the facility
|
|
provided by the Map-Reduce framework to collect data output by either the
|
|
<code>Mapper</code> or the <code>Reducer</code> i.e. intermediate outputs
|
|
or the output of the job.</p>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.OutputCollector -->
|
|
<!-- start class org.apache.hadoop.mapred.OutputCommitter -->
|
|
<class name="OutputCommitter" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="OutputCommitter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setupJob"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobContext" type="org.apache.hadoop.mapred.JobContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[For the framework to setup the job output during initialization
|
|
|
|
@param jobContext Context of the job whose output is being written.
|
|
@throws IOException if temporary output could not be created]]>
|
|
</doc>
|
|
</method>
|
|
<method name="cleanupJob"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobContext" type="org.apache.hadoop.mapred.JobContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[For cleaning up the job's output after job completion
|
|
|
|
@param jobContext Context of the job whose output is being written.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setupTask"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskContext" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Sets up output for the task.
|
|
|
|
@param taskContext Context of the task whose output is being written.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="needsTaskCommit" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskContext" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check whether the task needs a commit.
|
|
|
|
@param taskContext
|
|
@return <code>true</code> if the task output should be committed, <code>false</code> otherwise
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="commitTask"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskContext" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[To promote the task's temporary output to final output location
|
|
|
|
The task's output is moved to the job's output directory.
|
|
|
|
@param taskContext Context of the task whose output is being written.
|
|
@throws IOException if the commit is not successful]]>
|
|
</doc>
|
|
</method>
|
|
<method name="abortTask"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskContext" type="org.apache.hadoop.mapred.TaskAttemptContext"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Discard the task output
|
|
|
|
@param taskContext
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>OutputCommitter</code> describes the commit of task output for a
|
|
Map-Reduce job.
|
|
|
|
<p>The Map-Reduce framework relies on the <code>OutputCommitter</code> of
|
|
the job to:</p>
|
|
<ol>
|
|
<li>
|
|
Setup the job during initialization. For example, create the temporary
|
|
output directory for the job during the initialization of the job.
|
|
</li>
|
|
<li>
|
|
Cleanup the job after the job completion. For example, remove the
|
|
temporary output directory after the job completion.
|
|
</li>
|
|
<li>
|
|
Setup the task temporary output.
|
|
</li>
|
|
<li>
|
|
Check whether a task needs a commit. This is to avoid the commit
|
|
procedure if a task does not need commit.
|
|
</li>
|
|
<li>
|
|
Commit of the task output.
|
|
</li>
|
|
<li>
|
|
Discard the task output if the task is aborted.
|
|
</li>
|
|
</ol>
|
|
|
|
@see FileOutputCommitter
|
|
@see JobContext
|
|
@see TaskAttemptContext]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.OutputCommitter -->
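<!-- Example (illustrative sketch, not from the recorded API): the OutputCommitter lifecycle as
     a skeleton subclass. A real committer, such as FileOutputCommitter, stages task output in a
     temporary location and promotes it in commitTask(); the method bodies below are placeholders.

     import java.io.IOException;
     import org.apache.hadoop.mapred.*;

     public class SkeletonOutputCommitter extends OutputCommitter {
       public void setupJob(JobContext context) throws IOException {
         // e.g. create the job-level temporary output directory
       }
       public void cleanupJob(JobContext context) throws IOException {
         // e.g. remove the job-level temporary output directory
       }
       public void setupTask(TaskAttemptContext context) throws IOException {
         // e.g. nothing: task temporary output can be created lazily
       }
       public boolean needsTaskCommit(TaskAttemptContext context) throws IOException {
         return true;  // commit every task attempt
       }
       public void commitTask(TaskAttemptContext context) throws IOException {
         // e.g. move the task's temporary output into the job output directory
       }
       public void abortTask(TaskAttemptContext context) throws IOException {
         // e.g. delete the task's temporary output
       }
     }
-->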
|
|
<!-- start interface org.apache.hadoop.mapred.OutputFormat -->
|
|
<interface name="OutputFormat" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link RecordWriter} for the given job.
|
|
|
|
@param ignored
|
|
@param job configuration for the job whose output is being written.
|
|
@param name the unique name for this part of the output.
|
|
@param progress mechanism for reporting progress while writing to file.
|
|
@return a {@link RecordWriter} to write the output for the job.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checkOutputSpecs"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check for validity of the output-specification for the job.
|
|
|
|
<p>This is to validate the output specification for the job when it is
|
|
submitted. Typically checks that it does not already exist,
|
|
throwing an exception when it already exists, so that output is not
|
|
overwritten.</p>
|
|
|
|
@param ignored
|
|
@param job job configuration.
|
|
@throws IOException when output should not be attempted]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>OutputFormat</code> describes the output-specification for a
|
|
Map-Reduce job.
|
|
|
|
<p>The Map-Reduce framework relies on the <code>OutputFormat</code> of the
|
|
job to:</p>
|
|
<ol>
|
|
<li>
|
|
Validate the output-specification of the job. For example, check that the
|
|
output directory doesn't already exist.
</li>
|
|
<li>
|
|
Provide the {@link RecordWriter} implementation to be used to write out
|
|
the output files of the job. Output files are stored in a
|
|
{@link FileSystem}.
|
|
</li>
|
|
</ol>
|
|
|
|
@see RecordWriter
|
|
@see JobConf]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.OutputFormat -->
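<!-- Example (illustrative sketch, not from the recorded API): a minimal OutputFormat that
     validates the output directory in checkOutputSpecs() and returns a RecordWriter writing
     tab-separated lines. For brevity it writes directly into the output directory rather than
     going through the usual temporary-output/commit machinery.

     import java.io.DataOutputStream;
     import java.io.IOException;
     import org.apache.hadoop.fs.FileSystem;
     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.*;
     import org.apache.hadoop.util.Progressable;

     public class SimpleTextOutputFormat implements OutputFormat<Text, Text> {

       public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException {
         Path outDir = FileOutputFormat.getOutputPath(job);
         if (outDir == null) {
           throw new IOException("Output directory not set in JobConf");
         }
         if (outDir.getFileSystem(job).exists(outDir)) {
           throw new IOException("Output directory " + outDir + " already exists");
         }
       }

       public RecordWriter<Text, Text> getRecordWriter(FileSystem ignored, JobConf job,
           String name, Progressable progress) throws IOException {
         Path file = new Path(FileOutputFormat.getOutputPath(job), name);
         final DataOutputStream out = file.getFileSystem(job).create(file, progress);
         return new RecordWriter<Text, Text>() {
           public void write(Text key, Text value) throws IOException {
             out.writeBytes(key.toString() + "\t" + value.toString() + "\n");
           }
           public void close(Reporter reporter) throws IOException {
             out.close();
           }
         };
       }
     }
-->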
|
|
<!-- start class org.apache.hadoop.mapred.OutputLogFilter -->
|
|
<class name="OutputLogFilter" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.fs.PathFilter"/>
|
|
<constructor name="OutputLogFilter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class filters log files from the given directory.
|
|
It doesn't accept paths containing _logs.
|
|
It can be used to list the paths of an output directory as follows:
|
|
Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
|
|
new OutputLogFilter()));]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.OutputLogFilter -->
|
|
<!-- start interface org.apache.hadoop.mapred.Partitioner -->
|
|
<interface name="Partitioner" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<method name="getPartition" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K2"/>
|
|
<param name="value" type="V2"/>
|
|
<param name="numPartitions" type="int"/>
|
|
<doc>
|
|
<![CDATA[Get the partition number for a given key (hence record) given the total
|
|
number of partitions i.e. number of reduce-tasks for the job.
|
|
|
|
<p>Typically a hash function on all or a subset of the key.</p>
|
|
|
|
@param key the key to be partitioned.
|
|
@param value the entry value.
|
|
@param numPartitions the total number of partitions.
|
|
@return the partition number for the <code>key</code>.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Partitions the key space.
|
|
|
|
<p><code>Partitioner</code> controls the partitioning of the keys of the
|
|
intermediate map-outputs. The key (or a subset of the key) is used to derive
|
|
the partition, typically by a hash function. The total number of partitions
|
|
is the same as the number of reduce tasks for the job. Hence this controls
|
|
which of the <code>m</code> reduce tasks the intermediate key (and hence the
|
|
record) is sent for reduction.</p>
|
|
|
|
@see Reducer]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.Partitioner -->
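<!-- Example (illustrative sketch, not from the recorded API): a Partitioner that hashes only
     part of the key so that related records meet in the same reduce. The "field:rest" key
     layout is an assumption made for the example.

     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.Partitioner;

     public class FirstFieldPartitioner implements Partitioner<Text, Text> {
       public void configure(JobConf job) { }

       public int getPartition(Text key, Text value, int numPartitions) {
         String field = key.toString().split(":", 2)[0];
         // Mask off the sign bit so the result is always non-negative.
         return (field.hashCode() & Integer.MAX_VALUE) % numPartitions;
       }
     }
-->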
|
|
<!-- start interface org.apache.hadoop.mapred.RecordReader -->
|
|
<interface name="RecordReader" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads the next key/value pair from the input for processing.
|
|
|
|
@param key the key to read data into
|
|
@param value the value to read data into
|
|
@return true iff a key/value was read, false if at EOF]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createKey" return="K"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create an object of the appropriate type to be used as a key.
|
|
|
|
@return a new key object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createValue" return="V"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create an object of the appropriate type to be used as a value.
|
|
|
|
@return a new value object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns the current position in the input.
|
|
|
|
@return the current position in the input.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close this {@link RecordReader} to future operations.
|
|
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[How much of the input has the {@link RecordReader} consumed, i.e.,
|
|
how much of the input has been processed?
|
|
|
|
@return progress from <code>0.0</code> to <code>1.0</code>.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>RecordReader</code> reads <key, value> pairs from an
|
|
{@link InputSplit}.
|
|
|
|
<p><code>RecordReader</code>, typically, converts the byte-oriented view of
|
|
the input, provided by the <code>InputSplit</code>, and presents a
|
|
record-oriented view for the {@link Mapper} & {@link Reducer} tasks for
|
|
processing. It thus assumes the responsibility of processing record
|
|
boundaries and presenting the tasks with keys and values.</p>
|
|
|
|
@see InputSplit
|
|
@see InputFormat]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.RecordReader -->
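<!-- Example (illustrative sketch, not from the recorded API): a toy RecordReader showing the
     contract: next() fills the caller-supplied key/value objects and returns false at end of
     input, while createKey()/createValue() provide reusable instances. It fabricates numbered
     records instead of parsing an InputSplit.

     import java.io.IOException;
     import org.apache.hadoop.io.LongWritable;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.RecordReader;

     public class CountingRecordReader implements RecordReader<LongWritable, Text> {
       private final long total;
       private long emitted = 0;

       public CountingRecordReader(long total) { this.total = total; }

       public boolean next(LongWritable key, Text value) throws IOException {
         if (emitted >= total) {
           return false;                     // end of input
         }
         key.set(emitted);
         value.set("record-" + emitted);
         emitted++;
         return true;
       }

       public LongWritable createKey() { return new LongWritable(); }
       public Text createValue() { return new Text(); }
       public long getPos() throws IOException { return emitted; }
       public float getProgress() throws IOException {
         return total == 0 ? 1.0f : (float) emitted / total;
       }
       public void close() throws IOException { }
     }
-->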
|
|
<!-- start interface org.apache.hadoop.mapred.RecordWriter -->
|
|
<interface name="RecordWriter" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes a key/value pair.
|
|
|
|
@param key the key to write.
|
|
@param value the value to write.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close this <code>RecordWriter</code> to future operations.
|
|
|
|
@param reporter facility to report progress.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>RecordWriter</code> writes the output <key, value> pairs
|
|
to an output file.
|
|
|
|
<p><code>RecordWriter</code> implementations write the job outputs to the
|
|
{@link FileSystem}.
|
|
|
|
@see OutputFormat]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.RecordWriter -->
|
|
<!-- start interface org.apache.hadoop.mapred.Reducer -->
|
|
<interface name="Reducer" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<implements name="org.apache.hadoop.io.Closeable"/>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K2"/>
|
|
<param name="values" type="java.util.Iterator<V2>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K3, V3>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[<i>Reduces</i> values for a given key.
|
|
|
|
<p>The framework calls this method for each
|
|
<code><key, (list of values)></code> pair in the grouped inputs.
|
|
Output values must be of the same type as input values. Input keys must
|
|
not be altered. The framework will <b>reuse</b> the key and value objects
|
|
that are passed into the reduce, therefore the application should clone
|
|
the objects they want to keep a copy of. In many cases, all values are
|
|
combined into zero or one value.
|
|
</p>
|
|
|
|
<p>Output pairs are collected with calls to
|
|
{@link OutputCollector#collect(Object,Object)}.</p>
|
|
|
|
<p>Applications can use the {@link Reporter} provided to report progress
|
|
or just indicate that they are alive. In scenarios where the application
|
|
takes an insignificant amount of time to process individual key/value
|
|
pairs, this is crucial since the framework might assume that the task has
|
|
timed-out and kill that task. The other way of avoiding this is to set
|
|
<a href="{@docRoot}/../hadoop-default.html#mapred.task.timeout">
|
|
mapred.task.timeout</a> to a high-enough value (or even zero for no
|
|
time-outs).</p>
|
|
|
|
@param key the key.
|
|
@param values the list of values to reduce.
|
|
@param output to collect keys and combined values.
|
|
@param reporter facility to report progress.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Reduces a set of intermediate values which share a key to a smaller set of
|
|
values.
|
|
|
|
<p>The number of <code>Reducer</code>s for the job is set by the user via
|
|
{@link JobConf#setNumReduceTasks(int)}. <code>Reducer</code> implementations
|
|
can access the {@link JobConf} for the job via the
|
|
{@link JobConfigurable#configure(JobConf)} method and initialize themselves.
|
|
Similarly they can use the {@link Closeable#close()} method for
|
|
de-initialization.</p>
|
|
|
|
<p><code>Reducer</code> has 3 primary phases:</p>
|
|
<ol>
|
|
<li>
|
|
|
|
<h4 id="Shuffle">Shuffle</h4>
|
|
|
|
<p><code>Reducer</code> is input the grouped output of a {@link Mapper}.
|
|
In this phase the framework, for each <code>Reducer</code>, fetches the
|
|
relevant partition of the output of all the <code>Mapper</code>s, via HTTP.
|
|
</p>
|
|
</li>
|
|
|
|
<li>
|
|
<h4 id="Sort">Sort</h4>
|
|
|
|
<p>The framework groups <code>Reducer</code> inputs by <code>key</code>s
|
|
(since different <code>Mapper</code>s may have output the same key) in this
|
|
stage.</p>
|
|
|
|
<p>The shuffle and sort phases occur simultaneously i.e. while outputs are
|
|
being fetched they are merged.</p>
|
|
|
|
<h5 id="SecondarySort">SecondarySort</h5>
|
|
|
|
<p>If equivalence rules for keys while grouping the intermediates are
|
|
different from those for grouping keys before reduction, then one may
|
|
specify a <code>Comparator</code> via
|
|
{@link JobConf#setOutputValueGroupingComparator(Class)}. Since
|
|
{@link JobConf#setOutputKeyComparatorClass(Class)} can be used to
|
|
control how intermediate keys are grouped, these can be used in conjunction
|
|
to simulate <i>secondary sort on values</i>.</p>
|
|
|
|
|
|
For example, say that you want to find duplicate web pages and tag them
|
|
all with the url of the "best" known example. You would set up the job
|
|
like:
|
|
<ul>
|
|
<li>Map Input Key: url</li>
|
|
<li>Map Input Value: document</li>
|
|
<li>Map Output Key: document checksum, url pagerank</li>
|
|
<li>Map Output Value: url</li>
|
|
<li>Partitioner: by checksum</li>
|
|
<li>OutputKeyComparator: by checksum and then decreasing pagerank</li>
|
|
<li>OutputValueGroupingComparator: by checksum</li>
|
|
</ul>
|
|
</li>
|
|
|
|
<li>
|
|
<h4 id="Reduce">Reduce</h4>
|
|
|
|
<p>In this phase the
|
|
{@link #reduce(Object, Iterator, OutputCollector, Reporter)}
|
|
method is called for each <code><key, (list of values)></code> pair in
|
|
the grouped inputs.</p>
|
|
<p>The output of the reduce task is typically written to the
|
|
{@link FileSystem} via
|
|
{@link OutputCollector#collect(Object, Object)}.</p>
|
|
</li>
|
|
</ol>
|
|
|
|
<p>The output of the <code>Reducer</code> is <b>not re-sorted</b>.</p>
|
|
|
|
<p>Example:</p>
|
|
<p><blockquote><pre>
|
|
public class MyReducer<K extends WritableComparable, V extends Writable>
|
|
extends MapReduceBase implements Reducer<K, V, K, V> {
|
|
|
|
static enum MyCounters { NUM_RECORDS }
|
|
|
|
private String reduceTaskId;
|
|
private int noKeys = 0;
|
|
|
|
public void configure(JobConf job) {
|
|
reduceTaskId = job.get("mapred.task.id");
|
|
}
|
|
|
|
public void reduce(K key, Iterator<V> values,
|
|
OutputCollector<K, V> output,
|
|
Reporter reporter)
|
|
throws IOException {
|
|
|
|
// Process
|
|
int noValues = 0;
|
|
while (values.hasNext()) {
|
|
V value = values.next();
|
|
|
|
// Increment the no. of values for this key
|
|
++noValues;
|
|
|
|
// Process the <key, value> pair (assume this takes a while)
|
|
// ...
|
|
// ...
|
|
|
|
// Let the framework know that we are alive, and kicking!
|
|
if ((noValues%10) == 0) {
|
|
reporter.progress();
|
|
}
|
|
|
|
// Process some more
|
|
// ...
|
|
// ...
|
|
|
|
// Output the <key, value>
|
|
output.collect(key, value);
|
|
}
|
|
|
|
// Increment the no. of <key, list of values> pairs processed
|
|
++noKeys;
|
|
|
|
// Increment counters
|
|
reporter.incrCounter(NUM_RECORDS, 1);
|
|
|
|
// Every 100 keys update application-level status
|
|
if ((noKeys%100) == 0) {
|
|
reporter.setStatus(reduceTaskId + " processed " + noKeys);
|
|
}
|
|
}
|
|
}
|
|
</pre></blockquote></p>
|
|
|
|
@see Mapper
|
|
@see Partitioner
|
|
@see Reporter
|
|
@see MapReduceBase]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.Reducer -->
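<!-- Example (illustrative sketch, not from the recorded API): the secondary-sort wiring
     described under "SecondarySort" above. Every class named below (the comparators and the
     partitioner) is a hypothetical placeholder the application would have to implement.

     import org.apache.hadoop.mapred.JobConf;

     public class SecondarySortSetup {
       static void configure(JobConf job) {
         // Partition on the checksum part of the key only.
         job.setPartitionerClass(ChecksumPartitioner.class);
         // Full sort order: checksum, then decreasing pagerank.
         job.setOutputKeyComparatorClass(ChecksumThenRankComparator.class);
         // Group values for reduce() by checksum alone.
         job.setOutputValueGroupingComparator(ChecksumGroupingComparator.class);
       }
     }
-->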
|
|
<!-- start interface org.apache.hadoop.mapred.Reporter -->
|
|
<interface name="Reporter" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Progressable"/>
|
|
<method name="setStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="status" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the status description for the task.
|
|
|
|
@param status brief description of the current status.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounter" return="org.apache.hadoop.mapred.Counters.Counter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="group" type="java.lang.String"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link Counter} of the given group with the given name.
|
|
|
|
@param group counter group
|
|
@param name counter name
|
|
@return the <code>Counter</code> of the given group/name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrCounter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Enum"/>
|
|
<param name="amount" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increments the counter identified by the key, which can be of
|
|
any {@link Enum} type, by the specified amount.
|
|
|
|
@param key key to identify the counter to be incremented. The key can be
|
|
any <code>Enum</code>.
|
|
@param amount A non-negative amount by which the counter is to
|
|
be incremented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrCounter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="group" type="java.lang.String"/>
|
|
<param name="counter" type="java.lang.String"/>
|
|
<param name="amount" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increments the counter identified by the group and counter name
|
|
by the specified amount.
|
|
|
|
@param group name to identify the group of the counter to be incremented.
|
|
@param counter name to identify the counter within the group.
|
|
@param amount A non-negative amount by which the counter is to
|
|
be incremented.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInputSplit" return="org.apache.hadoop.mapred.InputSplit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="UnsupportedOperationException" type="java.lang.UnsupportedOperationException"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link InputSplit} object for a map.
|
|
|
|
@return the <code>InputSplit</code> that the map is reading from.
|
|
@throws UnsupportedOperationException if called outside a mapper]]>
|
|
</doc>
|
|
</method>
|
|
<field name="NULL" type="org.apache.hadoop.mapred.Reporter"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A constant of Reporter type that does nothing.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A facility for Map-Reduce applications to report progress and update
|
|
counters, status information etc.
|
|
|
|
<p>{@link Mapper} and {@link Reducer} can use the <code>Reporter</code>
|
|
provided to report progress or just indicate that they are alive. In
|
|
scenarios where the application takes an insignificant amount of time to
|
|
process individual key/value pairs, this is crucial since the framework
|
|
might assume that the task has timed-out and kill that task.
|
|
|
|
<p>Applications can also update {@link Counters} via the provided
|
|
<code>Reporter</code> .</p>
|
|
|
|
@see Progressable
|
|
@see Counters]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.Reporter -->
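<!-- Example (illustrative sketch, not from the recorded API): Reporter usage inside a map
     task: keeping application counters, reporting liveness around slow records and updating
     the task status line. Names are made up for the example.

     import java.io.IOException;
     import org.apache.hadoop.io.IntWritable;
     import org.apache.hadoop.io.LongWritable;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.*;

     public class CountingMapper extends MapReduceBase
         implements Mapper<LongWritable, Text, Text, IntWritable> {

       static enum RecordCounters { PROCESSED, SKIPPED }

       public void map(LongWritable key, Text value,
                       OutputCollector<Text, IntWritable> output,
                       Reporter reporter) throws IOException {
         if (value.getLength() == 0) {
           reporter.incrCounter(RecordCounters.SKIPPED, 1);
           return;
         }
         // ... potentially slow processing of the record ...
         reporter.progress();                         // tell the framework we are alive
         reporter.incrCounter(RecordCounters.PROCESSED, 1);
         reporter.setStatus("last offset processed: " + key.get());
         output.collect(value, new IntWritable(1));
       }
     }
-->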
|
|
<!-- start interface org.apache.hadoop.mapred.RunningJob -->
|
|
<interface name="RunningJob" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getID" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the job identifier.
|
|
|
|
@return the job identifier.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobID" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="This method is deprecated and will be removed. Applications should
|
|
rather use {@link #getID()}.">
|
|
<doc>
|
|
<![CDATA[@deprecated This method is deprecated and will be removed. Applications should
|
|
rather use {@link #getID()}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the name of the job.
|
|
|
|
@return the name of the job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobFile" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the path of the submitted job configuration.
|
|
|
|
@return the path of the submitted job configuration.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTrackingURL" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the URL where some job progress information will be displayed.
|
|
|
|
@return the URL where some job progress information will be displayed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="mapProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the <i>progress</i> of the job's map-tasks, as a float between 0.0
|
|
and 1.0. When all map tasks have completed, the function returns 1.0.
|
|
|
|
@return the progress of the job's map-tasks.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reduceProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the <i>progress</i> of the job's reduce-tasks, as a float between 0.0
|
|
and 1.0. When all reduce tasks have completed, the function returns 1.0.
|
|
|
|
@return the progress of the job's reduce-tasks.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="cleanupProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the <i>progress</i> of the job's cleanup-tasks, as a float between 0.0
|
|
and 1.0. When all cleanup tasks have completed, the function returns 1.0.
|
|
|
|
@return the progress of the job's cleanup-tasks.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setupProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the <i>progress</i> of the job's setup-tasks, as a float between 0.0
|
|
and 1.0. When all setup tasks have completed, the function returns 1.0.
|
|
|
|
@return the progress of the job's setup-tasks.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isComplete" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check if the job is finished or not.
|
|
This is a non-blocking call.
|
|
|
|
@return <code>true</code> if the job is complete, else <code>false</code>.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isSuccessful" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check if the job completed successfully.
|
|
|
|
@return <code>true</code> if the job succeeded, else <code>false</code>.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="waitForCompletion"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Blocks until the job is complete.
|
|
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobState" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns the current state of the Job.
|
|
See {@link JobStatus} for the possible states.
|
|
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="killJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Kill the running job. Blocks until all job tasks have been
|
|
killed as well. If the job is no longer running, it simply returns.
|
|
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobPriority"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="priority" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Set the priority of a running job.
|
|
@param priority the new priority for the job.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskCompletionEvents" return="org.apache.hadoop.mapred.TaskCompletionEvent[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="startFrom" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get events indicating completion (success/failure) of component tasks.
|
|
|
|
@param startFrom index to start fetching events from
|
|
@return an array of {@link TaskCompletionEvent}s
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="killTask"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="shouldFail" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Kill indicated task attempt.
|
|
|
|
@param taskId the id of the task to be terminated.
|
|
@param shouldFail if true the task is failed and added to failed tasks
|
|
list, otherwise it is just killed, without affecting
|
|
job failure status.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="killTask"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="Applications should rather use {@link #killTask(TaskAttemptID, boolean)}">
|
|
<param name="taskId" type="java.lang.String"/>
|
|
<param name="shouldFail" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@deprecated Applications should rather use {@link #killTask(TaskAttemptID, boolean)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounters" return="org.apache.hadoop.mapred.Counters"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Gets the counters for this job.
|
|
|
|
@return the counters for this job.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>RunningJob</code> is the user-interface to query for details on a
|
|
running Map-Reduce job.
|
|
|
|
<p>Clients can get hold of <code>RunningJob</code> via the {@link JobClient}
|
|
and then query the running-job for details such as name, configuration,
|
|
progress etc.</p>
|
|
|
|
@see JobClient]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.RunningJob -->
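<!-- Example (illustrative sketch, not from the recorded API): submitting a job without
     blocking and polling the RunningJob handle for progress. Job configuration details are
     omitted.

     import java.io.IOException;
     import org.apache.hadoop.mapred.JobClient;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.RunningJob;

     public class SubmitAndPoll {
       public static void main(String[] args) throws IOException, InterruptedException {
         JobConf job = new JobConf(SubmitAndPoll.class);
         // ... set mapper, reducer, input/output formats and paths ...

         JobClient client = new JobClient(job);
         RunningJob running = client.submitJob(job);
         System.out.println("Tracking URL: " + running.getTrackingURL());

         while (!running.isComplete()) {
           System.out.printf("map %.0f%% reduce %.0f%%%n",
               running.mapProgress() * 100, running.reduceProgress() * 100);
           Thread.sleep(5000);
         }
         System.out.println(running.isSuccessful() ? "Job succeeded" : "Job failed");
       }
     }
-->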
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat -->
|
|
<class name="SequenceFileAsBinaryInputFormat" extends="org.apache.hadoop.mapred.SequenceFileInputFormat<org.apache.hadoop.io.BytesWritable, org.apache.hadoop.io.BytesWritable>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileAsBinaryInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.BytesWritable, org.apache.hadoop.io.BytesWritable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[InputFormat reading keys, values from SequenceFiles in binary (raw)
|
|
format.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat.SequenceFileAsBinaryRecordReader -->
|
|
<class name="SequenceFileAsBinaryInputFormat.SequenceFileAsBinaryRecordReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.BytesWritable, org.apache.hadoop.io.BytesWritable>"/>
|
|
<constructor name="SequenceFileAsBinaryInputFormat.SequenceFileAsBinaryRecordReader" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapred.FileSplit"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="createKey" return="org.apache.hadoop.io.BytesWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createValue" return="org.apache.hadoop.io.BytesWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getKeyClassName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Retrieve the name of the key class for this SequenceFile.
|
|
@see org.apache.hadoop.io.SequenceFile.Reader#getKeyClassName]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValueClassName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Retrieve the name of the value class for this SequenceFile.
|
|
@see org.apache.hadoop.io.SequenceFile.Reader#getValueClassName]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.BytesWritable"/>
|
|
<param name="val" type="org.apache.hadoop.io.BytesWritable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read raw bytes from a SequenceFile.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the progress within the input split
|
|
@return 0.0 to 1.0 of the input byte range]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Read records from a SequenceFile as binary (raw) bytes.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat.SequenceFileAsBinaryRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat -->
|
|
<class name="SequenceFileAsBinaryOutputFormat" extends="org.apache.hadoop.mapred.SequenceFileOutputFormat<org.apache.hadoop.io.BytesWritable, org.apache.hadoop.io.BytesWritable>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileAsBinaryOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setSequenceFileOutputKeyClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="theClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Set the key class for the {@link SequenceFile}
|
|
<p>This allows the user to specify the key class to be different
|
|
from the actual class ({@link BytesWritable}) used for writing </p>
|
|
|
|
@param conf the {@link JobConf} to modify
|
|
@param theClass the SequenceFile output key class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSequenceFileOutputValueClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="theClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Set the value class for the {@link SequenceFile}
|
|
<p>This allows the user to specify the value class to be different
|
|
from the actual class ({@link BytesWritable}) used for writing </p>
|
|
|
|
@param conf the {@link JobConf} to modify
|
|
@param theClass the SequenceFile output value class.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSequenceFileOutputKeyClass" return="java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the key class for the {@link SequenceFile}
|
|
|
|
@return the key class of the {@link SequenceFile}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSequenceFileOutputValueClass" return="java.lang.Class<? extends org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the value class for the {@link SequenceFile}
|
|
|
|
@return the value class of the {@link SequenceFile}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<org.apache.hadoop.io.BytesWritable, org.apache.hadoop.io.BytesWritable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="checkOutputSpecs"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link OutputFormat} that writes keys, values to
|
|
{@link SequenceFile}s in binary (raw) format]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat -->
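<!-- Example (illustrative sketch, not from the recorded API): configuring a job to write
     pre-serialized bytes through SequenceFileAsBinaryOutputFormat while recording the logical
     key/value classes in the SequenceFile header.

     import org.apache.hadoop.io.IntWritable;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat;

     public class BinaryOutputSetup {
       static void configure(JobConf job) {
         job.setOutputFormat(SequenceFileAsBinaryOutputFormat.class);
         // Classes recorded in the file header; the tasks still collect()
         // BytesWritable pairs containing the already-serialized bytes.
         SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, Text.class);
         SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, IntWritable.class);
       }
     }
-->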
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat.WritableValueBytes -->
|
|
<class name="SequenceFileAsBinaryOutputFormat.WritableValueBytes" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.SequenceFile.ValueBytes"/>
|
|
<constructor name="SequenceFileAsBinaryOutputFormat.WritableValueBytes"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="SequenceFileAsBinaryOutputFormat.WritableValueBytes" type="org.apache.hadoop.io.BytesWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="org.apache.hadoop.io.BytesWritable"/>
|
|
</method>
|
|
<method name="writeUncompressedBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="outStream" type="java.io.DataOutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeCompressedBytes"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="outStream" type="java.io.DataOutputStream"/>
|
|
<exception name="IllegalArgumentException" type="java.lang.IllegalArgumentException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Inner class used for appendRaw]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat.WritableValueBytes -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileAsTextInputFormat -->
|
|
<class name="SequenceFileAsTextInputFormat" extends="org.apache.hadoop.mapred.SequenceFileInputFormat<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileAsTextInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class is similar to SequenceFileInputFormat, except it generates SequenceFileAsTextRecordReader
|
|
which converts the input keys and values to their String forms by calling the toString() method.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileAsTextInputFormat -->
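<!-- Example (illustrative sketch, not from the recorded API): reading SequenceFiles but
     receiving both key and value as Text (their toString() forms) in the Mapper. The input
     path is a made-up placeholder.

     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.mapred.FileInputFormat;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.SequenceFileAsTextInputFormat;

     public class TextViewOfSequenceFiles {
       static void configure(JobConf job) {
         job.setInputFormat(SequenceFileAsTextInputFormat.class);
         FileInputFormat.setInputPaths(job, new Path("/data/seqfiles"));
         // The Mapper then declares Mapper<Text, Text, K2, V2>.
       }
     }
-->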
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileAsTextRecordReader -->
|
|
<class name="SequenceFileAsTextRecordReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<constructor name="SequenceFileAsTextRecordReader" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapred.FileSplit"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="createKey" return="org.apache.hadoop.io.Text"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createValue" return="org.apache.hadoop.io.Text"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Text"/>
|
|
<param name="value" type="org.apache.hadoop.io.Text"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the next key/value pair, converting both to their String (Text) form.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class converts the input keys and values to their String forms by calling the toString()
method. This class is to SequenceFileAsTextInputFormat what LineRecordReader
is to TextInputFormat.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileAsTextRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileInputFilter -->
|
|
<class name="SequenceFileInputFilter" extends="org.apache.hadoop.mapred.SequenceFileInputFormat<K, V>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileInputFilter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a record reader for the given split.
@param split file split
@param job job configuration
@param reporter reporter that sends reports to the task tracker
@return a RecordReader for the split]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setFilterClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="filterClass" type="java.lang.Class"/>
|
|
<doc>
|
|
<![CDATA[set the filter class
|
|
|
|
@param conf application configuration
|
|
@param filterClass filter class]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A class that allows a map/red job to work on a sample of sequence files.
|
|
The sample is decided by the filter class set by the job.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileInputFilter -->
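<!-- Illustrative usage sketch (not part of the JDiff-generated API data): sampling a SequenceFile
     input with the filtering input format. The input path and sampling frequency below are
     hypothetical placeholders.

     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.mapred.FileInputFormat;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.SequenceFileInputFilter;

     public class SampledInputSetup {
       public static JobConf configure() {
         JobConf conf = new JobConf();
         // Read SequenceFiles through the filtering input format.
         conf.setInputFormat(SequenceFileInputFilter.class);
         FileInputFormat.setInputPaths(conf, new Path("/data/events"));
         // Keep roughly one record in five, chosen by the MD5 digest of the key.
         SequenceFileInputFilter.setFilterClass(conf, SequenceFileInputFilter.MD5Filter.class);
         SequenceFileInputFilter.MD5Filter.setFrequency(conf, 5);
         return conf;
       }
     }
-->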
|
|
<!-- start interface org.apache.hadoop.mapred.SequenceFileInputFilter.Filter -->
|
|
<interface name="SequenceFileInputFilter.Filter" abstract="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Filter function: decide whether a record should be accepted.
@param key record key
@return true if the record is accepted; false otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[filter interface]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.SequenceFileInputFilter.Filter -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileInputFilter.FilterBase -->
|
|
<class name="SequenceFileInputFilter.FilterBase" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.SequenceFileInputFilter.Filter"/>
|
|
<constructor name="SequenceFileInputFilter.FilterBase"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[base class for Filters]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileInputFilter.FilterBase -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileInputFilter.MD5Filter -->
|
|
<class name="SequenceFileInputFilter.MD5Filter" extends="org.apache.hadoop.mapred.SequenceFileInputFilter.FilterBase"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileInputFilter.MD5Filter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setFrequency"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="frequency" type="int"/>
|
|
<doc>
|
|
<![CDATA[set the filtering frequency in configuration
|
|
|
|
@param conf configuration
|
|
@param frequency filtering frequency]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[configure the filter according to configuration
|
|
|
|
@param conf configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Filtering method
|
|
If MD5(key) % frequency==0, return true; otherwise return false
|
|
@see org.apache.hadoop.mapred.SequenceFileInputFilter.Filter#accept(Object)]]>
|
|
</doc>
|
|
</method>
|
|
<field name="MD5_LEN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This class returns a set of records by examining the MD5 digest of each
key against a filtering frequency <i>f</i>. The filtering criterion is
MD5(key) % f == 0.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileInputFilter.MD5Filter -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileInputFilter.PercentFilter -->
|
|
<class name="SequenceFileInputFilter.PercentFilter" extends="org.apache.hadoop.mapred.SequenceFileInputFilter.FilterBase"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileInputFilter.PercentFilter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setFrequency"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="frequency" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the frequency and store it in the configuration.
@param conf configuration
@param frequency filtering frequency]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[configure the filter by checking the configuration
|
|
|
|
@param conf configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Filtering method
|
|
If record# % frequency==0, return true; otherwise return false
|
|
@see org.apache.hadoop.mapred.SequenceFileInputFilter.Filter#accept(Object)]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class returns a percentage of the records.
The percentage is determined by a filtering frequency <i>f</i> using
the criterion record# % f == 0.
For example, if the frequency is 10, one out of every 10 records is returned.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileInputFilter.PercentFilter -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileInputFilter.RegexFilter -->
|
|
<class name="SequenceFileInputFilter.RegexFilter" extends="org.apache.hadoop.mapred.SequenceFileInputFilter.FilterBase"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileInputFilter.RegexFilter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setPattern"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="regex" type="java.lang.String"/>
|
|
<exception name="PatternSyntaxException" type="java.util.regex.PatternSyntaxException"/>
|
|
<doc>
|
|
<![CDATA[Define the filtering regex and store it in the configuration.
@param conf where the regex is set
@param regex regex used as a filter]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[configure the Filter by checking the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="accept" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Filtering method
|
|
If key matches the regex, return true; otherwise return false
|
|
@see org.apache.hadoop.mapred.SequenceFileInputFilter.Filter#accept(Object)]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Filters records by matching the key against a regex.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileInputFilter.RegexFilter -->
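<!-- Illustrative usage sketch (not part of the JDiff-generated API data): accepting only records whose
     key matches a regular expression. The pattern below is a hypothetical placeholder.

     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.SequenceFileInputFilter;

     public class RegexFilteredInput {
       public static void configure(JobConf conf) {
         conf.setInputFormat(SequenceFileInputFilter.class);
         SequenceFileInputFilter.setFilterClass(conf, SequenceFileInputFilter.RegexFilter.class);
         // Accept only records whose key's toString() matches this pattern.
         SequenceFileInputFilter.RegexFilter.setPattern(conf, "user_[0-9]+");
       }
     }
-->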
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileInputFormat -->
|
|
<class name="SequenceFileInputFormat" extends="org.apache.hadoop.mapred.FileInputFormat<K, V>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="listStatus" return="org.apache.hadoop.fs.FileStatus[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link InputFormat} for {@link SequenceFile}s.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileInputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileOutputFormat -->
|
|
<class name="SequenceFileOutputFormat" extends="org.apache.hadoop.mapred.FileOutputFormat<K, V>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SequenceFileOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getReaders" return="org.apache.hadoop.io.SequenceFile.Reader[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="dir" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Open the output generated by this format.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputCompressionType" return="org.apache.hadoop.io.SequenceFile.CompressionType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the {@link CompressionType} for the output {@link SequenceFile}.
|
|
@param conf the {@link JobConf}
|
|
@return the {@link CompressionType} for the output {@link SequenceFile},
|
|
defaulting to {@link CompressionType#RECORD}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutputCompressionType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="style" type="org.apache.hadoop.io.SequenceFile.CompressionType"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link CompressionType} for the output {@link SequenceFile}.
|
|
@param conf the {@link JobConf} to modify
|
|
@param style the {@link CompressionType} for the output
|
|
{@link SequenceFile}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link OutputFormat} that writes {@link SequenceFile}s.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileOutputFormat -->
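<!-- Illustrative usage sketch (not part of the JDiff-generated API data): writing job output as
     block-compressed SequenceFiles. The output path below is a hypothetical placeholder.

     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.io.SequenceFile;
     import org.apache.hadoop.mapred.FileOutputFormat;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.SequenceFileOutputFormat;

     public class CompressedSequenceOutput {
       public static void configure(JobConf conf) {
         conf.setOutputFormat(SequenceFileOutputFormat.class);
         FileOutputFormat.setOutputPath(conf, new Path("/out/seq"));
         // Turn output compression on, then compress whole blocks of records
         // instead of the default per-record compression.
         FileOutputFormat.setCompressOutput(conf, true);
         SequenceFileOutputFormat.setOutputCompressionType(conf, SequenceFile.CompressionType.BLOCK);
       }
     }
-->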
|
|
<!-- start class org.apache.hadoop.mapred.SequenceFileRecordReader -->
|
|
<class name="SequenceFileRecordReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordReader<K, V>"/>
|
|
<constructor name="SequenceFileRecordReader" type="org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapred.FileSplit"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="getKeyClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The class of key that must be passed to {@link
#next(Object, Object)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValueClass" return="java.lang.Class"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The class of value that must be passed to {@link
#next(Object, Object)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createKey" return="K"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="createValue" return="V"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getCurrentValue"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="value" type="V"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the progress within the input split
|
|
@return 0.0 to 1.0 of the input byte range]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="seek"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="pos" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="conf" type="org.apache.hadoop.conf.Configuration"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An {@link RecordReader} for {@link SequenceFile}s.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SequenceFileRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.SkipBadRecords -->
|
|
<class name="SkipBadRecords" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SkipBadRecords"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getAttemptsToStartSkipping" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the number of task attempts AFTER which skip mode
will be kicked off. When skip mode is kicked off, the task reports
to the TaskTracker the range of records it will process next, so that
on failure the TaskTracker knows which records are possibly bad.
On further executions, those records are skipped.
Default value is 2.

@param conf the configuration
@return the number of task attempts after which skipping starts]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setAttemptsToStartSkipping"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="attemptsToStartSkipping" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the number of task attempts AFTER which skip mode
will be kicked off. When skip mode is kicked off, the task reports
to the TaskTracker the range of records it will process next, so that
on failure the TaskTracker knows which records are possibly bad.
On further executions, those records are skipped.
Default value is 2.

@param conf the configuration
@param attemptsToStartSkipping number of task attempts]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAutoIncrMapperProcCount" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the flag which if set to true,
|
|
{@link SkipBadRecords#COUNTER_MAP_PROCESSED_RECORDS} is incremented
|
|
by MapRunner after invoking the map function. This value must be set to
|
|
false for applications that process the records asynchronously
or buffer the input records (for example, Streaming applications).
In such cases, applications should increment this counter on their own.
|
|
Default value is true.
|
|
|
|
@param conf the configuration
|
|
@return <code>true</code> if auto increment
|
|
{@link SkipBadRecords#COUNTER_MAP_PROCESSED_RECORDS}.
|
|
<code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setAutoIncrMapperProcCount"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="autoIncr" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set the flag which if set to true,
|
|
{@link SkipBadRecords#COUNTER_MAP_PROCESSED_RECORDS} is incremented
|
|
by MapRunner after invoking the map function. This value must be set to
|
|
false for applications that process the records asynchronously
or buffer the input records (for example, Streaming applications).
In such cases, applications should increment this counter on their own.
|
|
Default value is true.
|
|
|
|
@param conf the configuration
|
|
@param autoIncr whether to auto increment
|
|
{@link SkipBadRecords#COUNTER_MAP_PROCESSED_RECORDS}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAutoIncrReducerProcCount" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the flag which if set to true,
|
|
{@link SkipBadRecords#COUNTER_REDUCE_PROCESSED_GROUPS} is incremented
|
|
by framework after invoking the reduce function. This value must be set to
|
|
false for applications that process the records asynchronously
or buffer the input records (for example, Streaming applications).
In such cases, applications should increment this counter on their own.
|
|
Default value is true.
|
|
|
|
@param conf the configuration
|
|
@return <code>true</code> if auto increment
|
|
{@link SkipBadRecords#COUNTER_REDUCE_PROCESSED_GROUPS}.
|
|
<code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setAutoIncrReducerProcCount"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="autoIncr" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set the flag which if set to true,
|
|
{@link SkipBadRecords#COUNTER_REDUCE_PROCESSED_GROUPS} is incremented
|
|
by framework after invoking the reduce function. This value must be set to
|
|
false for applications that process the records asynchronously
or buffer the input records (for example, Streaming applications).
In such cases, applications should increment this counter on their own.
|
|
Default value is true.
|
|
|
|
@param conf the configuration
|
|
@param autoIncr whether to auto increment
|
|
{@link SkipBadRecords#COUNTER_REDUCE_PROCESSED_GROUPS}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSkipOutputPath" return="org.apache.hadoop.fs.Path"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the directory to which skipped records are written. By default it is
|
|
a subdirectory of the output _logs directory.
Users can stop writing skipped records by setting the value to null.

@param conf the configuration.
@return the skip output directory path, or null if neither this nor the
output directory is set.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setSkipOutputPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the directory to which skipped records are written. By default it is
|
|
a subdirectory of the output _logs directory.
Users can stop writing skipped records by setting the value to null.
|
|
|
|
@param conf the configuration.
|
|
@param path skip output directory path]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapperMaxSkipRecords" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the number of acceptable skip records surrounding the bad record PER
|
|
bad record in mapper. The number includes the bad record as well.
|
|
To turn the feature of detection/skipping of bad records off, set the
|
|
value to 0.
|
|
The framework tries to narrow down the skipped range by retrying
|
|
until this threshold is met OR all attempts get exhausted for this task.
|
|
Set the value to Long.MAX_VALUE to indicate that the framework need not try to
narrow down; whatever records get skipped (this depends on the application) are
acceptable.
|
|
Default value is 0.
|
|
|
|
@param conf the configuration
|
|
@return maxSkipRecs acceptable skip records.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapperMaxSkipRecords"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="maxSkipRecs" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the number of acceptable skip records surrounding the bad record PER
|
|
bad record in mapper. The number includes the bad record as well.
|
|
To turn the feature of detection/skipping of bad records off, set the
|
|
value to 0.
|
|
The framework tries to narrow down the skipped range by retrying
|
|
until this threshold is met OR all attempts get exhausted for this task.
|
|
Set the value to Long.MAX_VALUE to indicate that the framework need not try to
narrow down; whatever records get skipped (this depends on the application) are
acceptable.
|
|
Default value is 0.
|
|
|
|
@param conf the configuration
|
|
@param maxSkipRecs acceptable skip records.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReducerMaxSkipGroups" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the number of acceptable skip groups surrounding the bad group PER
|
|
bad group in reducer. The number includes the bad group as well.
|
|
To turn the feature of detection/skipping of bad groups off, set the
|
|
value to 0.
|
|
The framework tries to narrow down the skipped range by retrying
|
|
until this threshold is met OR all attempts get exhausted for this task.
|
|
Set the value to Long.MAX_VALUE to indicate that the framework need not try to
narrow down; whatever groups get skipped (this depends on the application) are
acceptable.
|
|
Default value is 0.
|
|
|
|
@param conf the configuration
|
|
@return maxSkipGrps acceptable skip groups.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setReducerMaxSkipGroups"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="maxSkipGrps" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the number of acceptable skip groups surrounding the bad group PER
|
|
bad group in reducer. The number includes the bad group as well.
|
|
To turn the feature of detection/skipping of bad groups off, set the
|
|
value to 0.
|
|
The framework tries to narrow down the skipped range by retrying
|
|
until this threshold is met OR all attempts get exhausted for this task.
|
|
Set the value to Long.MAX_VALUE to indicate that the framework need not try to
narrow down; whatever groups get skipped (this depends on the application) are
acceptable.
|
|
Default value is 0.
|
|
|
|
@param conf the configuration
|
|
@param maxSkipGrps acceptable skip groups.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="COUNTER_GROUP" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Special counters which are written by the application and are
|
|
used by the framework for detecting bad records. For detecting bad records
|
|
these counters must be incremented by the application.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="COUNTER_MAP_PROCESSED_RECORDS" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Number of processed map records.
|
|
@see SkipBadRecords#getAutoIncrMapperProcCount(Configuration)]]>
|
|
</doc>
|
|
</field>
|
|
<field name="COUNTER_REDUCE_PROCESSED_GROUPS" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Number of processed reduce groups.
|
|
@see SkipBadRecords#getAutoIncrReducerProcCount(Configuration)]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Utility class for skip bad records functionality. It contains various
|
|
settings related to skipping of bad records.
|
|
|
|
<p>Hadoop provides an optional mode of execution in which the bad records
|
|
are detected and skipped in further attempts.
|
|
|
|
<p>This feature can be used when map/reduce tasks crash deterministically on
certain input. This happens due to bugs in the map/reduce function. The usual
course would be to fix these bugs. But sometimes this is not possible;
perhaps the bug is in third-party libraries for which the source code is
not available. In that case, the task never reaches completion even with
multiple attempts, and the complete data for that task is lost.</p>

<p>With this feature, only a small portion of data surrounding the
bad record is lost, which may be acceptable for some user applications;
see {@link SkipBadRecords#setMapperMaxSkipRecords(Configuration, long)}.</p>

<p>The skipping mode gets kicked off after a certain number of failures;
see {@link SkipBadRecords#setAttemptsToStartSkipping(Configuration, int)}.</p>

<p>In skipping mode, the map/reduce task maintains the record range currently
being processed. Before giving the input to the map/reduce function, it sends
this record range to the TaskTracker. If the task crashes, the TaskTracker
knows which range was last reported, and on further attempts that range
gets skipped.</p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.SkipBadRecords -->
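<!-- Illustrative usage sketch (not part of the JDiff-generated API data): enabling skip mode for a job.
     The threshold values below are hypothetical placeholders.

     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.SkipBadRecords;

     public class SkipModeSetup {
       public static void configure(JobConf conf) {
         // Start skipping after the second failed attempt (the documented default).
         SkipBadRecords.setAttemptsToStartSkipping(conf, 2);
         // Allow up to 100 map records around each bad record to be lost; 0 disables skipping.
         SkipBadRecords.setMapperMaxSkipRecords(conf, 100L);
         // Allow up to 10 reduce key groups around each bad group to be lost.
         SkipBadRecords.setReducerMaxSkipGroups(conf, 10L);
       }
     }
-->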
|
|
<!-- start class org.apache.hadoop.mapred.StatusHttpServer -->
|
|
<class name="StatusHttpServer" extends="org.apache.hadoop.http.HttpServer"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A mapred http server.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.StatusHttpServer -->
|
|
<!-- start class org.apache.hadoop.mapred.StatusHttpServer.TaskGraphServlet -->
|
|
<class name="StatusHttpServer.TaskGraphServlet" extends="javax.servlet.http.HttpServlet"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="StatusHttpServer.TaskGraphServlet"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="doGet"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="request" type="javax.servlet.http.HttpServletRequest"/>
|
|
<param name="response" type="javax.servlet.http.HttpServletResponse"/>
|
|
<exception name="ServletException" type="javax.servlet.ServletException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="width" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[width of the graph w/o margins]]>
|
|
</doc>
|
|
</field>
|
|
<field name="height" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[height of the graph w/o margins]]>
|
|
</doc>
|
|
</field>
|
|
<field name="ymargin" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[margin space on y axis]]>
|
|
</doc>
|
|
</field>
|
|
<field name="xmargin" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[margin space on x axis]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[The servlet that outputs svg graphics for map / reduce task
|
|
statuses]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.StatusHttpServer.TaskGraphServlet -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskAttemptContext -->
|
|
<class name="TaskAttemptContext" extends="org.apache.hadoop.mapred.JobContext"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getTaskAttemptID" return="org.apache.hadoop.mapred.TaskAttemptID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the taskAttemptID.
|
|
|
|
@return TaskAttemptID]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobConf" return="org.apache.hadoop.mapred.JobConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the job Configuration.
|
|
|
|
@return JobConf]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskAttemptContext -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskAttemptID -->
|
|
<class name="TaskAttemptID" extends="org.apache.hadoop.mapred.ID"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TaskAttemptID" type="org.apache.hadoop.mapred.TaskID, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a TaskAttemptID object from given {@link TaskID}.
|
|
@param taskId TaskID that this task belongs to
|
|
@param id the task attempt number]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="TaskAttemptID" type="java.lang.String, int, boolean, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a TaskId object from given parts.
|
|
@param jtIdentifier jobTracker identifier
|
|
@param jobId job number
|
|
@param isMap whether the tip is a map
|
|
@param taskId taskId number
|
|
@param id the task attempt number]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getJobID" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the {@link JobID} object that this task attempt belongs to]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskID" return="org.apache.hadoop.mapred.TaskID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the {@link TaskID} object that this task attempt belongs to]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isMap" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns whether this TaskAttemptID is a map ID]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="org.apache.hadoop.mapred.ID"/>
|
|
<doc>
|
|
<![CDATA[Compare TaskIds by first tipIds, then by task numbers.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="org.apache.hadoop.mapred.TaskAttemptID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="forName" return="org.apache.hadoop.mapred.TaskAttemptID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<exception name="IllegalArgumentException" type="java.lang.IllegalArgumentException"/>
|
|
<doc>
|
|
<![CDATA[Construct a TaskAttemptID object from given string
|
|
@return constructed TaskAttemptID object or null if the given String is null
|
|
@throws IllegalArgumentException if the given string is malformed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskAttemptIDsPattern" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jtIdentifier" type="java.lang.String"/>
|
|
<param name="jobId" type="java.lang.Integer"/>
|
|
<param name="isMap" type="java.lang.Boolean"/>
|
|
<param name="taskId" type="java.lang.Integer"/>
|
|
<param name="attemptId" type="java.lang.Integer"/>
|
|
<doc>
|
|
<![CDATA[Returns a regex pattern which matches task attempt IDs. Arguments can
|
|
be given null, in which case that part of the regex will be generic.
|
|
For example to obtain a regex matching <i>all task attempt IDs</i>
|
|
of <i>any jobtracker</i>, in <i>any job</i>, of the <i>first
|
|
map task</i>, we would use :
|
|
<pre>
|
|
TaskAttemptID.getTaskAttemptIDsPattern(null, null, true, 1, null);
|
|
</pre>
|
|
which will return :
|
|
<pre> "attempt_[^_]*_[0-9]*_m_000001_[0-9]*" </pre>
|
|
@param jtIdentifier jobTracker identifier, or null
|
|
@param jobId job number, or null
|
|
@param isMap whether the tip is a map, or null
|
|
@param taskId taskId number, or null
|
|
@param attemptId the task attempt number, or null
|
|
@return a regex pattern matching TaskAttemptIDs]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[TaskAttemptID represents the immutable and unique identifier for
|
|
a task attempt. Each task attempt is one particular instance of a Map or
|
|
Reduce Task identified by its TaskID.
|
|
|
|
TaskAttemptID consists of 2 parts. First part is the
|
|
{@link TaskID}, that this TaskAttemptID belongs to.
|
|
Second part is the task attempt number. <br>
|
|
An example TaskAttemptID is :
|
|
<code>attempt_200707121733_0003_m_000005_0</code> , which represents the
|
|
zeroth task attempt for the fifth map task in the third job
|
|
running at the jobtracker started at <code>200707121733</code>.
|
|
<p>
|
|
Applications should never construct or parse TaskAttemptID strings,
but rather use the appropriate constructors or the {@link #forName(String)}
method.
|
|
|
|
@see JobID
|
|
@see TaskID]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskAttemptID -->
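<!-- Illustrative usage sketch (not part of the JDiff-generated API data): parsing an attempt string
     back into a TaskAttemptID. The attempt string is the example quoted in the class documentation.

     import org.apache.hadoop.mapred.TaskAttemptID;

     public class AttemptIdParsing {
       public static void main(String[] args) {
         TaskAttemptID attempt = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
         System.out.println(attempt.getJobID());   // job_200707121733_0003
         System.out.println(attempt.getTaskID());  // task_200707121733_0003_m_000005
         System.out.println(attempt.isMap());      // true
       }
     }
-->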
|
|
<!-- start class org.apache.hadoop.mapred.TaskCompletionEvent -->
|
|
<class name="TaskCompletionEvent" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="TaskCompletionEvent"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default constructor for Writable.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="TaskCompletionEvent" type="int, org.apache.hadoop.mapred.TaskAttemptID, int, boolean, org.apache.hadoop.mapred.TaskCompletionEvent.Status, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor. eventId should be created externally and incremented
|
|
per event for each job.
|
|
@param eventId event id; event ids should be unique and assigned
incrementally, starting from 0.
|
|
@param taskId task id
|
|
@param status task's status
|
|
@param taskTrackerHttp task tracker's host:port for http.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getEventId" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns event Id.
|
|
@return event id]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskId" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use {@link #getTaskAttemptId()} instead.">
|
|
<doc>
|
|
<![CDATA[Returns task id.
|
|
@return task id
|
|
@deprecated use {@link #getTaskAttemptId()} instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskAttemptId" return="org.apache.hadoop.mapred.TaskAttemptID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns task id.
|
|
@return task id]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskStatus" return="org.apache.hadoop.mapred.TaskCompletionEvent.Status"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns enum Status.SUCCESS or Status.FAILURE.
|
|
@return task tracker status]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskTrackerHttp" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[http location of the tasktracker where this task ran.
|
|
@return http location of tasktracker user logs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskRunTime" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns time (in millisec) the task took to complete.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskRunTime"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskCompletionTime" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the task completion time
|
|
@param taskCompletionTime time (in millisec) the task took to complete]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setEventId"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="eventId" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the event id. Event ids should be assigned incrementally, starting from 0.
|
|
@param eventId]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskId"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use {@link #setTaskID(TaskAttemptID)} instead.">
|
|
<param name="taskId" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets task id.
|
|
@param taskId
|
|
@deprecated use {@link #setTaskID(TaskAttemptID)} instead.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<doc>
|
|
<![CDATA[Sets task id.
|
|
@param taskId]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskStatus"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="status" type="org.apache.hadoop.mapred.TaskCompletionEvent.Status"/>
|
|
<doc>
|
|
<![CDATA[Set task status.
|
|
@param status]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskTrackerHttp"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskTrackerHttp" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set task tracker http location.
|
|
@param taskTrackerHttp]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="isMapTask" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="idWithinJob" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="EMPTY_ARRAY" type="org.apache.hadoop.mapred.TaskCompletionEvent[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This is used to track task completion events on
the job tracker.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskCompletionEvent -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskCompletionEvent.Status -->
|
|
<class name="TaskCompletionEvent.Status" extends="java.lang.Enum<org.apache.hadoop.mapred.TaskCompletionEvent.Status>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.TaskCompletionEvent.Status[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.TaskCompletionEvent.Status"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskCompletionEvent.Status -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskID -->
|
|
<class name="TaskID" extends="org.apache.hadoop.mapred.ID"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TaskID" type="org.apache.hadoop.mapred.JobID, boolean, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a TaskID object from given {@link JobID}.
|
|
@param jobId JobID that this tip belongs to
|
|
@param isMap whether the tip is a map
|
|
@param id the tip number]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="TaskID" type="java.lang.String, int, boolean, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs a TaskInProgressId object from given parts.
|
|
@param jtIdentifier jobTracker identifier
|
|
@param jobId job number
|
|
@param isMap whether the tip is a map
|
|
@param id the tip number]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getJobID" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the {@link JobID} object that this tip belongs to]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isMap" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns whether this TaskID is a map ID]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="org.apache.hadoop.mapred.ID"/>
|
|
<doc>
|
|
<![CDATA[Compare TaskInProgressIds by first jobIds, then by tip numbers. Reduces are
|
|
defined as greater than maps.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="org.apache.hadoop.mapred.TaskID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="forName" return="org.apache.hadoop.mapred.TaskID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<exception name="IllegalArgumentException" type="java.lang.IllegalArgumentException"/>
|
|
<doc>
|
|
<![CDATA[Construct a TaskID object from given string
|
|
@return constructed TaskID object or null if the given String is null
|
|
@throws IllegalArgumentException if the given string is malformed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskIDsPattern" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jtIdentifier" type="java.lang.String"/>
|
|
<param name="jobId" type="java.lang.Integer"/>
|
|
<param name="isMap" type="java.lang.Boolean"/>
|
|
<param name="taskId" type="java.lang.Integer"/>
|
|
<doc>
|
|
<![CDATA[Returns a regex pattern which matches task IDs. Arguments can
|
|
be given null, in which case that part of the regex will be generic.
|
|
For example to obtain a regex matching <i>the first map task</i>
|
|
of <i>any jobtracker</i>, of <i>any job</i>, we would use :
|
|
<pre>
|
|
TaskID.getTaskIDsPattern(null, null, true, 1);
|
|
</pre>
|
|
which will return :
|
|
<pre> "task_[^_]*_[0-9]*_m_000001*" </pre>
|
|
@param jtIdentifier jobTracker identifier, or null
|
|
@param jobId job number, or null
|
|
@param isMap whether the tip is a map, or null
|
|
@param taskId taskId number, or null
|
|
@return a regex pattern matching TaskIDs]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[TaskID represents the immutable and unique identifier for
|
|
a Map or Reduce Task. Each TaskID encompasses multiple attempts made to
|
|
execute the Map or Reduce Task, each of which is uniquely identified by
|
|
their TaskAttemptID.
|
|
|
|
TaskID consists of 3 parts. First part is the {@link JobID}, that this
|
|
TaskInProgress belongs to. Second part of the TaskID is either 'm' or 'r'
|
|
representing whether the task is a map task or a reduce task.
|
|
And the third part is the task number. <br>
|
|
An example TaskID is :
|
|
<code>task_200707121733_0003_m_000005</code>, which represents the
|
|
fifth map task in the third job running at the jobtracker
|
|
started at <code>200707121733</code>.
|
|
<p>
|
|
Applications should never construct or parse TaskID strings,
|
|
but rather use the appropriate constructors or the {@link #forName(String)}
|
|
method.
|
|
|
|
@see JobID
|
|
@see TaskAttemptID]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskID -->
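<!-- Usage sketch: a minimal example of the TaskID helpers documented above, which parse an ID
     string and build a matching regex instead of hand-parsing. The ID string and task number
     shown are illustrative values only, not taken from any particular cluster.

     import org.apache.hadoop.mapred.TaskID;

     public class TaskIdExample {
       public static void main(String[] args) {
         // Parse an ID string back into a TaskID rather than splitting it by hand.
         TaskID id = TaskID.forName("task_200707121733_0003_m_000005");
         System.out.println(id);

         // Regex matching the first map task of any job on any jobtracker.
         String pattern = TaskID.getTaskIDsPattern(null, null, true, 1);
         System.out.println(pattern);
       }
     }
-->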
|
|
<!-- start class org.apache.hadoop.mapred.TaskLog -->
|
|
<class name="TaskLog" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TaskLog"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getTaskLogFile" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="filter" type="org.apache.hadoop.mapred.TaskLog.LogName"/>
|
|
</method>
|
|
<method name="getRealTaskLogFileLocation" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="filter" type="org.apache.hadoop.mapred.TaskLog.LogName"/>
|
|
</method>
|
|
<method name="getIndexFile" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="java.lang.String"/>
|
|
</method>
|
|
<method name="getIndexFile" return="java.io.File"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="java.lang.String"/>
|
|
<param name="isCleanup" type="boolean"/>
|
|
</method>
|
|
<method name="syncLogs"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="firstTaskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="syncLogs"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="firstTaskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="isCleanup" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="cleanup"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logsRetainHours" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Purge old user logs.
|
|
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskLogLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the desired maximum length of task's logs.
|
|
@param conf the job to look in
|
|
@return the number of bytes to cap the log files at]]>
|
|
</doc>
|
|
</method>
|
|
<method name="captureOutAndError" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cmd" type="java.util.List<java.lang.String>"/>
|
|
<param name="stdoutFilename" type="java.io.File"/>
|
|
<param name="stderrFilename" type="java.io.File"/>
|
|
<param name="tailLength" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Wrap a command in a shell to capture stdout and stderr to files.
|
|
If the tailLength is 0, the entire output will be saved.
|
|
@param cmd The command and the arguments that should be run
|
|
@param stdoutFilename The filename that stdout should be saved to
|
|
@param stderrFilename The filename that stderr should be saved to
|
|
@param tailLength The length of the tail to be saved.
|
|
@return the modified command that should be run]]>
|
|
</doc>
|
|
</method>
|
|
<method name="captureOutAndError" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="setup" type="java.util.List<java.lang.String>"/>
|
|
<param name="cmd" type="java.util.List<java.lang.String>"/>
|
|
<param name="stdoutFilename" type="java.io.File"/>
|
|
<param name="stderrFilename" type="java.io.File"/>
|
|
<param name="tailLength" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Wrap a command in a shell to capture stdout and stderr to files.
|
|
Setup commands, such as setting memory limits, can be passed; they
|
|
will be executed before exec.
|
|
If the tailLength is 0, the entire output will be saved.
|
|
@param setup The setup commands for the execed process.
|
|
@param cmd The command and the arguments that should be run
|
|
@param stdoutFilename The filename that stdout should be saved to
|
|
@param stderrFilename The filename that stderr should be saved to
|
|
@param tailLength The length of the tail to be saved.
|
|
@return the modified command that should be run]]>
|
|
</doc>
|
|
</method>
|
|
<method name="captureOutAndError" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="setup" type="java.util.List<java.lang.String>"/>
|
|
<param name="cmd" type="java.util.List<java.lang.String>"/>
|
|
<param name="stdoutFilename" type="java.io.File"/>
|
|
<param name="stderrFilename" type="java.io.File"/>
|
|
<param name="tailLength" type="long"/>
|
|
<param name="pidFileName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Wrap a command in a shell to capture stdout and stderr to files.
|
|
Setup commands, such as setting memory limits, can be passed; they
|
|
will be executed before exec.
|
|
If the tailLength is 0, the entire output will be saved.
|
|
@param setup The setup commands for the execed process.
|
|
@param cmd The command and the arguments that should be run
|
|
@param stdoutFilename The filename that stdout should be saved to
|
|
@param stderrFilename The filename that stderr should be saved to
|
|
@param tailLength The length of the tail to be saved.
|
|
@param pidFileName The name of the pid-file
|
|
@return the modified command that should be run]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addCommand" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cmd" type="java.util.List<java.lang.String>"/>
|
|
<param name="isExecutable" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add quotes to each of the command strings and
|
|
return them as a single string.
|
|
@param cmd The command to be quoted
|
|
@param isExecutable makes shell path if the first
|
|
argument is executable
|
|
@return the quoted string.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="captureDebugOut" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cmd" type="java.util.List<java.lang.String>"/>
|
|
<param name="debugoutFilename" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Wrap a command in a shell to capture the debug script's
|
|
stdout and stderr to debugout.
|
|
@param cmd The command and the arguments that should be run
|
|
@param debugoutFilename The filename that stdout and stderr
|
|
should be saved to.
|
|
@return the modified command that should be run
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A simple logger to handle the task-specific user logs.
|
|
This class uses the system property <code>hadoop.log.dir</code>.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskLog -->
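<!-- Usage sketch: a minimal example of the TaskLog.captureOutAndError overload documented above,
     which wraps a command in a shell so its stdout and stderr are redirected to files. The
     command and file paths are illustrative assumptions; a tailLength of 0 keeps the entire
     output rather than only the tail.

     import java.io.File;
     import java.util.Arrays;
     import java.util.List;
     import org.apache.hadoop.mapred.TaskLog;

     public class CaptureExample {
       public static void main(String[] args) throws Exception {
         List<String> cmd = Arrays.asList("java", "-version");
         File stdout = new File("/tmp/attempt_x/stdout");
         File stderr = new File("/tmp/attempt_x/stderr");

         // Returns the shell-wrapped command that should actually be executed.
         List<String> wrapped = TaskLog.captureOutAndError(cmd, stdout, stderr, 0);
         System.out.println(wrapped);
       }
     }
-->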
|
|
<!-- start class org.apache.hadoop.mapred.TaskLog.LogName -->
|
|
<class name="TaskLog.LogName" extends="java.lang.Enum<org.apache.hadoop.mapred.TaskLog.LogName>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.TaskLog.LogName[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.TaskLog.LogName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The filter for userlogs.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskLog.LogName -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskLogAppender -->
|
|
<class name="TaskLogAppender" extends="org.apache.log4j.FileAppender"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TaskLogAppender"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="activateOptions"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="event" type="org.apache.log4j.spi.LoggingEvent"/>
|
|
</method>
|
|
<method name="flush"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getTaskId" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Getter/Setter methods for log4j.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTaskId"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="java.lang.String"/>
|
|
</method>
|
|
<method name="getTotalLogFileSize" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setTotalLogFileSize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logSize" type="long"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A simple log4j-appender for the task child's
|
|
map-reduce system logs.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskLogAppender -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskLogServlet -->
|
|
<class name="TaskLogServlet" extends="javax.servlet.http.HttpServlet"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TaskLogServlet"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getTaskLogUrl" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskTrackerHostName" type="java.lang.String"/>
|
|
<param name="httpPort" type="java.lang.String"/>
|
|
<param name="taskAttemptID" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Construct the taskLogUrl
|
|
@param taskTrackerHostName
|
|
@param httpPort
|
|
@param taskAttemptID
|
|
@return the taskLogUrl]]>
|
|
</doc>
|
|
</method>
|
|
<method name="doGet"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="request" type="javax.servlet.http.HttpServletRequest"/>
|
|
<param name="response" type="javax.servlet.http.HttpServletResponse"/>
|
|
<exception name="ServletException" type="javax.servlet.ServletException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the logs via http.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A servlet that is run by the TaskTrackers to provide the task logs via http.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskLogServlet -->
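<!-- Usage sketch: building a task-log URL with the static helper documented above. The host
     name, port, and attempt ID are illustrative values only.

     import org.apache.hadoop.mapred.TaskLogServlet;

     public class LogUrlExample {
       public static void main(String[] args) {
         String url = TaskLogServlet.getTaskLogUrl(
             "tracker01.example.com", "50060", "attempt_200707121733_0003_m_000005_0");
         System.out.println(url);
       }
     }
-->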
|
|
<!-- start class org.apache.hadoop.mapred.TaskReport -->
|
|
<class name="TaskReport" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="TaskReport"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getTaskId" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use {@link #getTaskID()} instead">
|
|
<doc>
|
|
<![CDATA[@deprecated use {@link #getTaskID()} instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskID" return="org.apache.hadoop.mapred.TaskID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The id of the task.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The amount completed, between zero and one.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getState" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The most recent state, reported by a {@link Reporter}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDiagnostics" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A list of error messages.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCounters" return="org.apache.hadoop.mapred.Counters"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A table of counters.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFinishTime" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the finish time of the task.
|
|
@return 0 if the finish time was not set, else the finish time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStartTime" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the start time of the task.
|
|
@return 0 if the start time was not set, else the start time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A report on the state of a task.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskReport -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskTracker -->
|
|
<class name="TaskTracker" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.MRConstants"/>
|
|
<implements name="org.apache.hadoop.mapred.TaskUmbilicalProtocol"/>
|
|
<implements name="java.lang.Runnable"/>
|
|
<constructor name="TaskTracker" type="org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Start with the local machine name, and the default JobTracker]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getTaskTrackerInstrumentation" return="org.apache.hadoop.mapred.TaskTrackerInstrumentation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getProtocolVersion" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="protocol" type="java.lang.String"/>
|
|
<param name="clientVersion" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getInstrumentationClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.TaskTrackerInstrumentation>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="setInstrumentationClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="t" type="java.lang.Class<? extends org.apache.hadoop.mapred.TaskTrackerInstrumentation>"/>
|
|
</method>
|
|
<method name="cleanupStorage"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Removes all contents of temporary storage. Called upon
|
|
startup, to remove any leftovers from a previous run.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="shutdown"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close down the TaskTracker and all its components. We must also shut down
|
|
any running tasks or threads, and clean up disk space. A new TaskTracker
|
|
within the same process space might be restarted, so everything must be
|
|
clean.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobClient" return="org.apache.hadoop.mapred.InterTrackerProtocol"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The connection to the JobTracker, used by the TaskRunner
|
|
for locating remote files.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskTrackerReportAddress" return="java.net.InetSocketAddress"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the port to which the tasktracker is bound.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJvmManagerInstance" return="org.apache.hadoop.mapred.JvmManager"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The server retry loop.
|
|
This while-loop attempts to connect to the JobTracker. It only
|
|
loops when the old TaskTracker has gone bad (its state is
|
|
stale somehow) and we need to reinitialize everything.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTask" return="org.apache.hadoop.mapred.JvmTask"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jvmId" type="org.apache.hadoop.mapred.JVMId"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called upon startup by the child process, to fetch Task data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="statusUpdate" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="taskStatus" type="org.apache.hadoop.mapred.TaskStatus"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called periodically to report Task progress, from 0.0 to 1.0.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reportDiagnosticInfo"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="info" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called when the task dies before completion, and we want to report back
|
|
diagnostic info.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reportNextRecordRange"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="range" type="org.apache.hadoop.mapred.SortedRanges.Range"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="ping" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Child checking to see if we're alive. Normally does nothing.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="commitPending"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="taskStatus" type="org.apache.hadoop.mapred.TaskStatus"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Task is reporting that it is in commit_pending
|
|
and is waiting for the commit response.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="canCommit" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<doc>
|
|
<![CDATA[Child checking whether it can commit]]>
|
|
</doc>
|
|
</method>
|
|
<method name="done"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The task is done.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="shuffleError"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="message" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[A reduce-task failed to shuffle the map-outputs. Kill the task.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fsError"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskId" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="message" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[A child task had a local filesystem error. Kill the task.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapCompletionEvents" return="org.apache.hadoop.mapred.MapTaskCompletionEventsUpdate"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobId" type="org.apache.hadoop.mapred.JobID"/>
|
|
<param name="fromEventId" type="int"/>
|
|
<param name="maxLocs" type="int"/>
|
|
<param name="id" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="mapOutputLost"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="taskid" type="org.apache.hadoop.mapred.TaskAttemptID"/>
|
|
<param name="errorMsg" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[A completed map task's output has been lost.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isIdle" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Is this task tracker idle?
|
|
@return has this task tracker finished and cleaned up all of its tasks?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="argv" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[Start the TaskTracker, pointing toward the indicated JobTracker.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isTaskMemoryManagerEnabled" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Is the TaskMemoryManager enabled on this system?
|
|
@return true if enabled, false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTaskMemoryManager" return="org.apache.hadoop.mapred.TaskMemoryManagerThread"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="MR_CLIENTTRACE_FORMAT" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="ClientTraceLog" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[TaskTracker is a process that starts and tracks MR Tasks
|
|
in a networked environment. It contacts the JobTracker
|
|
for Task assignments and to report results.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskTracker -->
|
|
<!-- start class org.apache.hadoop.mapred.TaskTracker.MapOutputServlet -->
|
|
<class name="TaskTracker.MapOutputServlet" extends="javax.servlet.http.HttpServlet"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TaskTracker.MapOutputServlet"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="doGet"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="request" type="javax.servlet.http.HttpServletRequest"/>
|
|
<param name="response" type="javax.servlet.http.HttpServletResponse"/>
|
|
<exception name="ServletException" type="javax.servlet.ServletException"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class is used in TaskTracker's Jetty to serve the map outputs
|
|
to other nodes.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TaskTracker.MapOutputServlet -->
|
|
<!-- start class org.apache.hadoop.mapred.TextInputFormat -->
|
|
<class name="TextInputFormat" extends="org.apache.hadoop.mapred.FileInputFormat<org.apache.hadoop.io.LongWritable, org.apache.hadoop.io.Text>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<constructor name="TextInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="isSplitable" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="file" type="org.apache.hadoop.fs.Path"/>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.LongWritable, org.apache.hadoop.io.Text>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="genericSplit" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link InputFormat} for plain text files. Files are broken into lines.
|
|
Either linefeed or carriage-return is used to signal end of line. Keys are
|
|
the position in the file, and values are the line of text.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TextInputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.TextOutputFormat -->
|
|
<class name="TextOutputFormat" extends="org.apache.hadoop.mapred.FileOutputFormat<K, V>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TextOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link OutputFormat} that writes plain text files.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TextOutputFormat -->
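<!-- Usage sketch: wiring the two text formats documented above into a JobConf. The input and
     output paths are illustrative; FileInputFormat and FileOutputFormat are the mapred helpers
     used here to set them.

     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.mapred.FileInputFormat;
     import org.apache.hadoop.mapred.FileOutputFormat;
     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.TextInputFormat;
     import org.apache.hadoop.mapred.TextOutputFormat;

     public class TextFormatExample {
       public static void main(String[] args) {
         JobConf conf = new JobConf();
         // Keys are byte offsets into the file, values are lines of text.
         conf.setInputFormat(TextInputFormat.class);
         // Writes key/value pairs as plain text, tab-separated by default.
         conf.setOutputFormat(TextOutputFormat.class);
         FileInputFormat.setInputPaths(conf, new Path("/user/example/input"));
         FileOutputFormat.setOutputPath(conf, new Path("/user/example/output"));
       }
     }
-->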
|
|
<!-- start class org.apache.hadoop.mapred.TextOutputFormat.LineRecordWriter -->
|
|
<class name="TextOutputFormat.LineRecordWriter" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordWriter<K, V>"/>
|
|
<constructor name="TextOutputFormat.LineRecordWriter" type="java.io.DataOutputStream, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="TextOutputFormat.LineRecordWriter" type="java.io.DataOutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="out" type="java.io.DataOutputStream"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.TextOutputFormat.LineRecordWriter -->
|
|
</package>
|
|
<package name="org.apache.hadoop.mapred.jobcontrol">
|
|
<!-- start class org.apache.hadoop.mapred.jobcontrol.Job -->
|
|
<class name="Job" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Job" type="org.apache.hadoop.mapred.JobConf, java.util.ArrayList<org.apache.hadoop.mapred.jobcontrol.Job>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a job.
|
|
@param jobConf a mapred job configuration representing a job to be executed.
|
|
@param dependingJobs a list of jobs the current job depends on]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Job" type="org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a job.
|
|
|
|
@param jobConf mapred job configuration representing a job to be executed.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getJobName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the job name of this job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the job name for this job.
|
|
@param jobName the job name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobID" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the job ID of this job assigned by JobControl]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="id" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the job ID for this job.
|
|
@param id the job ID]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMapredJobID" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use {@link #getAssignedJobID()} instead">
|
|
<doc>
|
|
<![CDATA[@return the mapred ID of this job
|
|
@deprecated use {@link #getAssignedJobID()} instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMapredJobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="use {@link #setAssignedJobID(JobID)} instead">
|
|
<param name="mapredJobID" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the mapred ID for this job.
|
|
@param mapredJobID the mapred job ID for this job.
|
|
@deprecated use {@link #setAssignedJobID(JobID)} instead]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAssignedJobID" return="org.apache.hadoop.mapred.JobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the mapred ID of this job as assigned by the
|
|
mapred framework.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setAssignedJobID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="mapredJobID" type="org.apache.hadoop.mapred.JobID"/>
|
|
<doc>
|
|
<![CDATA[Set the mapred ID for this job as assigned by the
|
|
mapred framework.
|
|
@param mapredJobID the mapred job ID for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobConf" return="org.apache.hadoop.mapred.JobConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the mapred job conf of this job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setJobConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Set the mapred job conf for this job.
|
|
@param jobConf the mapred job conf for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getState" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the state of this job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setState"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="state" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the state for this job.
|
|
@param state the new state for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMessage" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the message of this job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMessage"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="message" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the message for this job.
|
|
@param message the message for this job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getJobClient" return="org.apache.hadoop.mapred.JobClient"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the job client of this job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDependingJobs" return="java.util.ArrayList<org.apache.hadoop.mapred.jobcontrol.Job>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the depending jobs of this job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addDependingJob" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dependingJob" type="org.apache.hadoop.mapred.jobcontrol.Job"/>
|
|
<doc>
|
|
<![CDATA[Add a job to this job's dependency list. Dependent jobs can only be added while a Job
|
|
is waiting to run, not during or afterwards.
|
|
|
|
@param dependingJob Job that this Job depends on.
|
|
@return <tt>true</tt> if the Job was added.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isCompleted" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return true if this job is in a complete state]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isReady" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return true if this job is in READY state]]>
|
|
</doc>
|
|
</method>
|
|
<method name="submit"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Submit this job to mapred. The state becomes RUNNING if submission
|
|
is successful, FAILED otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="SUCCESS" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="WAITING" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RUNNING" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="READY" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="FAILED" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DEPENDENT_FAILED" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This class encapsulates a MapReduce job and its dependencies. It monitors
|
|
the states of the depending jobs and updates the state of this job.
|
|
A job starts in the WAITING state. If it does not have any depending jobs, or
|
|
all of the depending jobs are in SUCCESS state, then the job state will become
|
|
READY. If any depending jobs fail, the job will fail too.
|
|
When in READY state, the job can be submitted to Hadoop for execution, with
|
|
the state changing into RUNNING state. From RUNNING state, the job can get into
|
|
SUCCESS or FAILED state, depending on the status of the job execution.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.jobcontrol.Job -->
|
|
<!-- start class org.apache.hadoop.mapred.jobcontrol.JobControl -->
|
|
<class name="JobControl" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Runnable"/>
|
|
<constructor name="JobControl" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a job control for a group of jobs.
|
|
@param groupName a name identifying this group]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getWaitingJobs" return="java.util.ArrayList<org.apache.hadoop.mapred.jobcontrol.Job>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the jobs in the waiting state]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRunningJobs" return="java.util.ArrayList<org.apache.hadoop.mapred.jobcontrol.Job>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the jobs in the running state]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReadyJobs" return="java.util.ArrayList<org.apache.hadoop.mapred.jobcontrol.Job>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the jobs in the ready state]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSuccessfulJobs" return="java.util.ArrayList<org.apache.hadoop.mapred.jobcontrol.Job>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the jobs in the success state]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFailedJobs" return="java.util.ArrayList<org.apache.hadoop.mapred.jobcontrol.Job>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="addJob" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="aJob" type="org.apache.hadoop.mapred.jobcontrol.Job"/>
|
|
<doc>
|
|
<![CDATA[Add a new job.
|
|
@param aJob the new job]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addJobs"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobs" type="java.util.Collection<org.apache.hadoop.mapred.jobcontrol.Job>"/>
|
|
<doc>
|
|
<![CDATA[Add a collection of jobs.
|
|
|
|
@param jobs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getState" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the thread state]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stop"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Set the thread state to STOPPING so that the
|
|
thread will stop when it wakes up.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="suspend"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Suspend the running thread.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="resume"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Resume the suspended thread.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="allFinished" return="boolean"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The main loop for the thread.
|
|
The loop does the following:
|
|
check the states of the running jobs,
|
|
update the states of the waiting jobs, and
|
|
submit the jobs in the ready state.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class encapsulates a set of MapReduce jobs and their dependencies. It tracks
|
|
the states of the jobs by placing them into different tables according to their
|
|
states.
|
|
|
|
This class provides APIs for the client app to add a job to the group and to get
|
|
the jobs in the group in different states. When a
|
|
job is added, an ID unique to the group is assigned to the job.
|
|
|
|
This class has a thread that submits jobs when they become ready, monitors the
|
|
states of the running jobs, and updates the states of jobs based on the state changes
|
|
of their depending jobs. The class provides APIs for suspending/resuming
|
|
the thread, and for stopping the thread.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.jobcontrol.JobControl -->
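<!-- Usage sketch: chaining two dependent jobs with the jobcontrol classes documented above. The
     two JobConf objects are assumed to be fully configured MapReduce jobs; the group name and
     polling interval are arbitrary choices.

     import org.apache.hadoop.mapred.JobConf;
     import org.apache.hadoop.mapred.jobcontrol.Job;
     import org.apache.hadoop.mapred.jobcontrol.JobControl;

     public class JobChainExample {
       public static void main(String[] args) throws Exception {
         JobConf firstConf = new JobConf();   // assumed configured elsewhere
         JobConf secondConf = new JobConf();  // assumed configured elsewhere

         Job first = new Job(firstConf);
         Job second = new Job(secondConf);
         second.addDependingJob(first);       // second becomes READY only after first succeeds

         JobControl control = new JobControl("example-group");
         control.addJob(first);
         control.addJob(second);

         // JobControl is a Runnable; run its submit/monitor loop in a separate thread.
         Thread runner = new Thread(control);
         runner.start();
         while (!control.allFinished()) {
           Thread.sleep(1000);
         }
         control.stop();
       }
     }
-->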
|
|
</package>
|
|
<package name="org.apache.hadoop.mapred.join">
|
|
<!-- start class org.apache.hadoop.mapred.join.ArrayListBackedIterator -->
|
|
<class name="ArrayListBackedIterator" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ResetableIterator<X>"/>
|
|
<constructor name="ArrayListBackedIterator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="ArrayListBackedIterator" type="java.util.ArrayList<X>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="X extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="replay" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="X extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="item" type="X extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class provides an implementation of ResetableIterator. The
|
|
implementation uses an {@link java.util.ArrayList} to store elements
|
|
added to it, replaying them as requested.
|
|
Prefer {@link StreamBackedIterator}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.ArrayListBackedIterator -->
|
|
<!-- start interface org.apache.hadoop.mapred.join.ComposableInputFormat -->
|
|
<interface name="ComposableInputFormat" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputFormat<K, V>"/>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.join.ComposableRecordReader<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Refinement of InputFormat requiring implementors to provide
|
|
ComposableRecordReader instead of RecordReader.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.join.ComposableInputFormat -->
|
|
<!-- start interface org.apache.hadoop.mapred.join.ComposableRecordReader -->
|
|
<interface name="ComposableRecordReader" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordReader<K, V>"/>
|
|
<implements name="java.lang.Comparable<org.apache.hadoop.mapred.join.ComposableRecordReader<K, ?>>"/>
|
|
<method name="id" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the position in the collector this class occupies.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="key" return="K extends org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the key this RecordReader would supply on a call to next(K,V)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="key"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Clone the key at the head of this RecordReader into the object provided.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if the stream is not empty, but provides no guarantee that
|
|
a call to next(K,V) will succeed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="skip"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Skip key-value pairs with keys less than or equal to the key provided.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="accept"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jc" type="org.apache.hadoop.mapred.join.CompositeRecordReader.JoinCollector"/>
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[While key-value pairs from this RecordReader match the given key, register
|
|
them with the JoinCollector provided.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Additional operations required of a RecordReader to participate in a join.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.join.ComposableRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.join.CompositeInputFormat -->
|
|
<class name="CompositeInputFormat" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ComposableInputFormat<K, org.apache.hadoop.mapred.join.TupleWritable>"/>
|
|
<constructor name="CompositeInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setFormat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Interpret a given string as a composite expression.
|
|
{@code
|
|
func ::= <ident>([<func>,]*<func>)
|
|
func ::= tbl(<class>,"<path>")
|
|
class ::= @see java.lang.Class#forName(java.lang.String)
|
|
path ::= @see org.apache.hadoop.fs.Path#Path(java.lang.String)
|
|
}
|
|
Reads expression from the <tt>mapred.join.expr</tt> property and
|
|
user-supplied join types from <tt>mapred.join.define.<ident></tt>
|
|
types. Paths supplied to <tt>tbl</tt> are given as input paths to the
|
|
InputFormat class listed.
|
|
@see #compose(java.lang.String, java.lang.Class, java.lang.String...)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addDefaults"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Adds the default set of identifiers to the parser.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSplits" return="org.apache.hadoop.mapred.InputSplit[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="numSplits" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Build a CompositeInputSplit from the child InputFormats by assigning the
|
|
ith split from each child to the ith composite split.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.join.ComposableRecordReader<K, org.apache.hadoop.mapred.join.TupleWritable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Construct a CompositeRecordReader for the children of this InputFormat
|
|
as defined in the init expression.
|
|
The outermost join need only be composable, not necessarily a composite.
|
|
Mandating TupleWritable isn't strictly correct.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compose" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inf" type="java.lang.Class<? extends org.apache.hadoop.mapred.InputFormat>"/>
|
|
<param name="path" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Convenience method for constructing composite formats.
|
|
Given InputFormat class (inf), path (p) return:
|
|
{@code tbl(<inf>, <p>) }]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compose" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="op" type="java.lang.String"/>
|
|
<param name="inf" type="java.lang.Class<? extends org.apache.hadoop.mapred.InputFormat>"/>
|
|
<param name="path" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Convenience method for constructing composite formats.
|
|
Given operation (op), Object class (inf), set of paths (p) return:
|
|
{@code <op>(tbl(<inf>,<p1>),tbl(<inf>,<p2>),...,tbl(<inf>,<pn>)) }]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compose" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="op" type="java.lang.String"/>
|
|
<param name="inf" type="java.lang.Class<? extends org.apache.hadoop.mapred.InputFormat>"/>
|
|
<param name="path" type="org.apache.hadoop.fs.Path[]"/>
|
|
<doc>
|
|
<![CDATA[Convenience method for constructing composite formats.
|
|
Given operation (op), Object class (inf), set of paths (p) return:
|
|
{@code <op>(tbl(<inf>,<p1>),tbl(<inf>,<p2>),...,tbl(<inf>,<pn>)) }]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An InputFormat capable of performing joins over a set of data sources sorted
|
|
and partitioned the same way.
|
|
@see #setFormat
|
|
|
|
A user may define new join types by setting the property
|
|
<tt>mapred.join.define.<ident></tt> to a classname. In the expression
|
|
<tt>mapred.join.expr</tt>, the identifier will be assumed to be a
|
|
ComposableRecordReader.
|
|
<tt>mapred.join.keycomparator</tt> can be a classname used to compare keys
|
|
in the join.
|
|
@see JoinRecordReader
|
|
@see MultiFilterRecordReader]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.CompositeInputFormat -->
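  <!-- Illustrative usage sketch: configuring a map-side inner join with the compose()
       helper documented above. SequenceFileInputFormat, MyJoinJob, and the /data/a and
       /data/b paths are assumptions for the example; both inputs must already be sorted
       and partitioned identically for the join to be valid.

       JobConf job = new JobConf(MyJoinJob.class);
       job.setInputFormat(CompositeInputFormat.class);
       job.set("mapred.join.expr", CompositeInputFormat.compose(
           "inner", SequenceFileInputFormat.class,
           new String[] { "/data/a", "/data/b" }));

       The mapper then receives the join key and a TupleWritable holding one value per
       composed source (see TupleWritable below).
  -->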
|
|
<!-- start class org.apache.hadoop.mapred.join.CompositeInputSplit -->
|
|
<class name="CompositeInputSplit" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputSplit"/>
|
|
<constructor name="CompositeInputSplit"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="CompositeInputSplit" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add an InputSplit to this collection.
|
|
@throws IOException If capacity was not specified during construction
|
|
or if capacity has been reached.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.mapred.InputSplit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<doc>
|
|
<![CDATA[Get ith child InputSplit.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the aggregate length of all child InputSplits currently added.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Get the length of ith child InputSplit.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocations" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Collect a set of hosts from all child InputSplits.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLocation" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Return the locations of the ith child InputSplit.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write splits in the following format.
|
|
{@code
|
|
<count><class1><class2>...<classn><split1><split2>...<splitn>
|
|
}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}
|
|
@throws IOException If the child InputSplit cannot be read, typically
|
|
for failing access checks.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This InputSplit contains a set of child InputSplits. Any InputSplit inserted
|
|
into this collection must have a public default constructor.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.CompositeInputSplit -->
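  <!-- Illustrative sketch: the capacity passed to the constructor bounds add(), which
       otherwise throws IOException as documented above. childSplitA and childSplitB
       stand for InputSplits obtained from the child InputFormats and are assumptions
       for the example.

       CompositeInputSplit split = new CompositeInputSplit(2);
       split.add(childSplitA);            // stored at position 0
       split.add(childSplitB);            // stored at position 1
       long total = split.getLength();    // aggregate length of both children
  -->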
|
|
<!-- start class org.apache.hadoop.mapred.join.CompositeRecordReader -->
|
|
<class name="CompositeRecordReader" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="CompositeRecordReader" type="int, int, java.lang.Class<? extends org.apache.hadoop.io.WritableComparator>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a RecordReader with <tt>capacity</tt> children to position
|
|
<tt>id</tt> in the parent reader.
|
|
The id of a root CompositeRecordReader is -1 by convention, but relying
|
|
on this is not recommended.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="combine" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="srcs" type="java.lang.Object[]"/>
|
|
<param name="value" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
</method>
|
|
<method name="id" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the position in the collector this class occupies.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecordReaderQueue" return="java.util.PriorityQueue<org.apache.hadoop.mapred.join.ComposableRecordReader<K, ?>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return sorted list of RecordReaders for this composite.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getComparator" return="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return comparator defining the ordering for RecordReaders in this
|
|
composite.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rr" type="org.apache.hadoop.mapred.join.ComposableRecordReader<K, ? extends V>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add a RecordReader to this collection.
|
|
The id() of a RecordReader determines where in the Tuple its
|
|
entry will appear. Adding RecordReaders with the same id has
|
|
undefined behavior.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="key" return="K extends org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the key for the current join or the value at the top of the
|
|
RecordReader heap.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="key"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Clone the key at the top of this RR into the given object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return true if it is possible that this could emit more values.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="skip"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Pass skip key to child RRs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDelegate" return="org.apache.hadoop.mapred.join.ResetableIterator<X>"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Obtain an iterator over the child RRs apropos of the value type
|
|
ultimately emitted from this join.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="accept"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jc" type="org.apache.hadoop.mapred.join.CompositeRecordReader.JoinCollector"/>
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[If the key provided matches that of this Composite, give the JoinCollector an
|
|
iterator over the values it may emit.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fillJoinCollector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="iterkey" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[For all child RRs offering the key provided, obtain an iterator
|
|
at that position in the JoinCollector.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.mapred.join.ComposableRecordReader<K, ?>"/>
|
|
<doc>
|
|
<![CDATA[Implement Comparable contract (compare key of join or head of heap
|
|
with that of another).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createKey" return="K extends org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new key value common to all child RRs.
|
|
@throws ClassCastException if key classes differ.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createInternalValue" return="org.apache.hadoop.mapred.join.TupleWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a value to be used internally for joins.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Unsupported (returns zero in all cases).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close all child RRs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Report progress as the minimum of all child RR progress.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="jc" type="org.apache.hadoop.mapred.join.CompositeRecordReader<K, V, X>.JoinCollector"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="kids" type="org.apache.hadoop.mapred.join.ComposableRecordReader[]"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A RecordReader that can effect joins of RecordReaders sharing a common key
|
|
type and partitioning.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.CompositeRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.join.InnerJoinRecordReader -->
|
|
<class name="InnerJoinRecordReader" extends="org.apache.hadoop.mapred.join.JoinRecordReader<K>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="combine" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="srcs" type="java.lang.Object[]"/>
|
|
<param name="dst" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<doc>
|
|
<![CDATA[Return true iff the tuple is full (all data sources contain this key).]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Full inner join.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.InnerJoinRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.join.JoinRecordReader -->
|
|
<class name="JoinRecordReader" extends="org.apache.hadoop.mapred.join.CompositeRecordReader<K, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.join.TupleWritable>"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ComposableRecordReader<K, org.apache.hadoop.mapred.join.TupleWritable>"/>
|
|
<constructor name="JoinRecordReader" type="int, org.apache.hadoop.mapred.JobConf, int, java.lang.Class<? extends org.apache.hadoop.io.WritableComparator>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Emit the next set of key, value pairs as defined by the child
|
|
RecordReaders and operation associated with this composite RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createValue" return="org.apache.hadoop.mapred.join.TupleWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDelegate" return="org.apache.hadoop.mapred.join.ResetableIterator<org.apache.hadoop.mapred.join.TupleWritable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return an iterator wrapping the JoinCollector.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Base class for Composite joins returning Tuples of arbitrary Writables.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.JoinRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.join.JoinRecordReader.JoinDelegationIterator -->
|
|
<class name="JoinRecordReader.JoinDelegationIterator" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ResetableIterator<org.apache.hadoop.mapred.join.TupleWritable>"/>
|
|
<constructor name="JoinRecordReader.JoinDelegationIterator"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="replay" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="item" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Since the JoinCollector is effecting our operation, we need only
|
|
provide an iterator proxy wrapping its operation.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.JoinRecordReader.JoinDelegationIterator -->
|
|
<!-- start class org.apache.hadoop.mapred.join.MultiFilterRecordReader -->
|
|
<class name="MultiFilterRecordReader" extends="org.apache.hadoop.mapred.join.CompositeRecordReader<K, V, V>"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ComposableRecordReader<K, V>"/>
|
|
<constructor name="MultiFilterRecordReader" type="int, org.apache.hadoop.mapred.JobConf, int, java.lang.Class<? extends org.apache.hadoop.io.WritableComparator>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="emit" return="V extends org.apache.hadoop.io.Writable"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="dst" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[For each tuple emitted, return a value (typically one of the values
|
|
in the tuple).
|
|
Modifying the Writables in the tuple is permitted and unlikely to affect
|
|
join behavior in most cases, but it is not recommended. It's safer to
|
|
clone first.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="combine" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="srcs" type="java.lang.Object[]"/>
|
|
<param name="dst" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<doc>
|
|
<![CDATA[Default implementation offers {@link #emit} every Tuple from the
|
|
collector (the outer join of child RRs).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="V extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createValue" return="V extends org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDelegate" return="org.apache.hadoop.mapred.join.ResetableIterator<V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return an iterator returning a single value from the tuple.
|
|
@see MultiFilterDelegationIterator]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Base class for Composite join returning values derived from multiple
|
|
sources, but generally not tuples.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.MultiFilterRecordReader -->
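  <!-- Illustrative sketch: a minimal MultiFilterRecordReader subclass. Per the entries
       above, emit() is the only abstract method here (combine() and getDelegate() have
       defaults), so a subclass that returns the first value present in each joined tuple
       could look like the class below. FirstValueRecordReader is a hypothetical name,
       not part of the API.

       import java.io.IOException;
       import org.apache.hadoop.io.Writable;
       import org.apache.hadoop.io.WritableComparable;
       import org.apache.hadoop.io.WritableComparator;
       import org.apache.hadoop.mapred.JobConf;
       import org.apache.hadoop.mapred.join.MultiFilterRecordReader;
       import org.apache.hadoop.mapred.join.TupleWritable;

       public class FirstValueRecordReader<K extends WritableComparable, V extends Writable>
           extends MultiFilterRecordReader<K, V> {

         public FirstValueRecordReader(int id, JobConf conf, int capacity,
             Class<? extends WritableComparator> cmpcl) throws IOException {
           super(id, conf, capacity, cmpcl);
         }

         @Override
         @SuppressWarnings("unchecked")
         protected V emit(TupleWritable dst) throws IOException {
           // Return the first child value present; cloning first is safer if callers reuse dst.
           return (V) dst.iterator().next();
         }
       }
  -->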
|
|
<!-- start class org.apache.hadoop.mapred.join.MultiFilterRecordReader.MultiFilterDelegationIterator -->
|
|
<class name="MultiFilterRecordReader.MultiFilterDelegationIterator" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ResetableIterator<V>"/>
|
|
<constructor name="MultiFilterRecordReader.MultiFilterDelegationIterator"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="V extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="replay" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="V extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="item" type="V extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Proxy the JoinCollector, but include callback to emit.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.MultiFilterRecordReader.MultiFilterDelegationIterator -->
|
|
<!-- start class org.apache.hadoop.mapred.join.OuterJoinRecordReader -->
|
|
<class name="OuterJoinRecordReader" extends="org.apache.hadoop.mapred.join.JoinRecordReader<K>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="combine" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="srcs" type="java.lang.Object[]"/>
|
|
<param name="dst" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<doc>
|
|
<![CDATA[Emit everything from the collector.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Full outer join.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.OuterJoinRecordReader -->
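  <!-- Illustrative example: for two composed sources A = {(k1,a1), (k2,a2)} and
       B = {(k1,b1)}, an inner join emits only k1 with the full tuple [a1, b1], while
       this full outer join also emits k2 with a tuple whose second position is absent
       (TupleWritable.has(1) returns false for it). The keys and values are assumptions
       for the example.
  -->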
|
|
<!-- start class org.apache.hadoop.mapred.join.OverrideRecordReader -->
|
|
<class name="OverrideRecordReader" extends="org.apache.hadoop.mapred.join.MultiFilterRecordReader<K, V>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="emit" return="V extends org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="dst" type="org.apache.hadoop.mapred.join.TupleWritable"/>
|
|
<doc>
|
|
<![CDATA[Emit the value with the highest position in the tuple.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="fillJoinCollector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="iterkey" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Instead of filling the JoinCollector with iterators from all
|
|
data sources, fill only the rightmost for this key.
|
|
This not only saves space by discarding the other sources, but
|
|
it also emits the number of key-value pairs in the preferred
|
|
RecordReader instead of repeating that stream n times, where
|
|
n is the cardinality of the cross product of the discarded
|
|
streams for the given key.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Prefer the "rightmost" data source for this key.
|
|
For example, <tt>override(S1,S2,S3)</tt> will prefer values
|
|
from S3 over S2, and values from S2 over S1 for all keys
|
|
emitted from all sources.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.OverrideRecordReader -->
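  <!-- Illustrative example: with the expression
       override(tbl(org.apache.hadoop.mapred.SequenceFileInputFormat,"/data/defaults"),
                tbl(org.apache.hadoop.mapred.SequenceFileInputFormat,"/data/overrides"))
       a key present in both inputs yields only the value from /data/overrides, the
       rightmost source. The input format and paths are assumptions for the example.
  -->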
|
|
<!-- start class org.apache.hadoop.mapred.join.Parser -->
|
|
<class name="Parser" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Parser"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Very simple shift-reduce parser for join expressions.
|
|
|
|
This should be sufficient for the user extension permitted now, but ought to
|
|
be replaced with a parser generator if more complex grammars are supported.
|
|
In particular, this "shift-reduce" parser has no states. Each set
|
|
of formals requires a different internal node type, which is responsible for
|
|
interpreting the list of tokens it receives. This is sufficient for the
|
|
current grammar, but it has several annoying properties that might inhibit
|
|
extension. In particular, parentheses are always function calls; an
|
|
algebraic or filter grammar would not only require a node type, but must
|
|
also work around the internals of this parser.
|
|
|
|
For most other cases, adding classes to the hierarchy, particularly by
|
|
extending JoinRecordReader and MultiFilterRecordReader, is fairly
|
|
straightforward. One need only override the relevant method(s) (usually only
|
|
{@link CompositeRecordReader#combine}) and include a property to map its
|
|
value to an identifier in the parser.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.Parser -->
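  <!-- Illustrative example: an expression this parser accepts, nesting one composite
       join inside another. The input format class and paths are assumptions for the
       example; the nested inner join contributes a (possibly nested) tuple to the
       outer join, as noted for TupleWritable below.

       outer(tbl(org.apache.hadoop.mapred.SequenceFileInputFormat,"/data/c"),
             inner(tbl(org.apache.hadoop.mapred.SequenceFileInputFormat,"/data/a"),
                   tbl(org.apache.hadoop.mapred.SequenceFileInputFormat,"/data/b")))
  -->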
|
|
<!-- start class org.apache.hadoop.mapred.join.Parser.Node -->
|
|
<class name="Parser.Node" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ComposableInputFormat"/>
|
|
<constructor name="Parser.Node" type="java.lang.String"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="addIdentifier"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="ident" type="java.lang.String"/>
|
|
<param name="mcstrSig" type="java.lang.Class[]"/>
|
|
<param name="nodetype" type="java.lang.Class<? extends org.apache.hadoop.mapred.join.Parser.Node>"/>
|
|
<param name="cl" type="java.lang.Class<? extends org.apache.hadoop.mapred.join.ComposableRecordReader>"/>
|
|
<exception name="NoSuchMethodException" type="java.lang.NoSuchMethodException"/>
|
|
<doc>
|
|
<![CDATA[For a given identifier, add a mapping to the nodetype for the parse
|
|
tree and to the ComposableRecordReader to be created, including the
|
|
formals required to invoke the constructor.
|
|
The nodetype and constructor signature should be filled in from the
|
|
child node.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="id" type="int"/>
|
|
</method>
|
|
<method name="setKeyComparator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="cmpcl" type="java.lang.Class<? extends org.apache.hadoop.io.WritableComparator>"/>
|
|
</method>
|
|
<field name="rrCstrMap" type="java.util.Map<java.lang.String, java.lang.reflect.Constructor<? extends org.apache.hadoop.mapred.join.ComposableRecordReader>>"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="id" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="ident" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="cmpcl" type="java.lang.Class<? extends org.apache.hadoop.io.WritableComparator>"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.Parser.Node -->
|
|
<!-- start class org.apache.hadoop.mapred.join.Parser.NodeToken -->
|
|
<class name="Parser.NodeToken" extends="org.apache.hadoop.mapred.join.Parser.Token"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getNode" return="org.apache.hadoop.mapred.join.Parser.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.Parser.NodeToken -->
|
|
<!-- start class org.apache.hadoop.mapred.join.Parser.NumToken -->
|
|
<class name="Parser.NumToken" extends="org.apache.hadoop.mapred.join.Parser.Token"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Parser.NumToken" type="double"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getNum" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.Parser.NumToken -->
|
|
<!-- start class org.apache.hadoop.mapred.join.Parser.StrToken -->
|
|
<class name="Parser.StrToken" extends="org.apache.hadoop.mapred.join.Parser.Token"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Parser.StrToken" type="org.apache.hadoop.mapred.join.Parser.TType, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getStr" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.Parser.StrToken -->
|
|
<!-- start class org.apache.hadoop.mapred.join.Parser.Token -->
|
|
<class name="Parser.Token" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getType" return="org.apache.hadoop.mapred.join.Parser.TType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getNode" return="org.apache.hadoop.mapred.join.Parser.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getNum" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getStr" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Tagged-union type for tokens from the join expression.
|
|
@see Parser.TType]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.Parser.Token -->
|
|
<!-- start class org.apache.hadoop.mapred.join.Parser.TType -->
|
|
<class name="Parser.TType" extends="java.lang.Enum<org.apache.hadoop.mapred.join.Parser.TType>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.mapred.join.Parser.TType[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.mapred.join.Parser.TType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.Parser.TType -->
|
|
<!-- start interface org.apache.hadoop.mapred.join.ResetableIterator -->
|
|
<interface name="ResetableIterator" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[True if a call to next may return a value. False positives are
|
|
permitted, but not false negatives.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="T extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Assign next value to actual.
|
|
It is required that elements added to a ResetableIterator be returned in
|
|
the same order after a call to {@link #reset} (FIFO).
|
|
|
|
Note that a call to this may fail for nested joins (i.e. more elements
|
|
available, but none satisfying the constraints of the join)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="replay" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="T extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Assign last value returned to actual.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Set iterator to return to the start of its range. Must be called after
|
|
calling {@link #add} to avoid a ConcurrentModificationException.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="item" type="T extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add an element to the collection of elements to iterate over.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close datasources and release resources. Calling methods on the iterator
|
|
after calling close has undefined behavior.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Close datasources, but do not release internal resources. Calling this
|
|
method should permit the object to be reused with a different datasource.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This defines an interface to a stateful Iterator that can replay elements
|
|
added to it directly.
|
|
Note that this does not extend {@link java.util.Iterator}.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.join.ResetableIterator -->
|
|
<!-- start class org.apache.hadoop.mapred.join.ResetableIterator.EMPTY -->
|
|
<class name="ResetableIterator.EMPTY" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ResetableIterator<U>"/>
|
|
<constructor name="ResetableIterator.EMPTY"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="U extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="replay" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="U extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="item" type="U extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.ResetableIterator.EMPTY -->
|
|
<!-- start class org.apache.hadoop.mapred.join.StreamBackedIterator -->
|
|
<class name="StreamBackedIterator" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ResetableIterator<X>"/>
|
|
<constructor name="StreamBackedIterator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="X extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="replay" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="X extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="item" type="X extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class provides an implementation of ResetableIterator. This
|
|
implementation uses a byte array to store elements added to it.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.StreamBackedIterator -->
|
|
<!-- start class org.apache.hadoop.mapred.join.TupleWritable -->
|
|
<class name="TupleWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<implements name="java.lang.Iterable<org.apache.hadoop.io.Writable>"/>
|
|
<constructor name="TupleWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create an empty tuple with no allocated storage for writables.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="TupleWritable" type="org.apache.hadoop.io.Writable[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Initialize tuple with storage; unknown whether any of them contain
|
|
"written" values.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="has" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<doc>
|
|
<![CDATA[Return true if tuple has an element at the position provided.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<doc>
|
|
<![CDATA[Get ith Writable from Tuple.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="size" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of children in this Tuple.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="iterator" return="java.util.Iterator<org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return an iterator over the elements in this tuple.
|
|
Note that this doesn't flatten the tuple; one may receive tuples
|
|
from this iterator.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Convert Tuple to String as in the following.
|
|
<tt>[<child1>,<child2>,...,<childn>]</tt>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes each Writable to <code>out</code>.
|
|
TupleWritable format:
|
|
{@code
|
|
<count><type1><type2>...<typen><obj1><obj2>...<objn>
|
|
}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Writable type storing multiple {@link org.apache.hadoop.io.Writable}s.
|
|
|
|
This is *not* a general-purpose tuple type. In almost all cases, users are
|
|
encouraged to implement their own serializable types, which can perform
|
|
better validation and provide more efficient encodings than this class is
|
|
capable of. TupleWritable relies on the join framework for type safety and
|
|
assumes its instances will rarely be persisted; these assumptions are not only
|
|
incompatible with, but contrary to, the general case.
|
|
|
|
@see org.apache.hadoop.io.Writable]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.TupleWritable -->
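<!-- Illustrative usage sketch for TupleWritable, using only the constructors and accessors
     listed above. The variable names are hypothetical; in practice instances are produced by
     the join framework rather than built by hand.

       import org.apache.hadoop.io.Text;
       import org.apache.hadoop.io.Writable;
       import org.apache.hadoop.mapred.join.TupleWritable;

       Writable[] vals = { new Text("left"), new Text("right") };
       TupleWritable tuple = new TupleWritable(vals);
       for (int i = 0; i < tuple.size(); i++) {
         if (tuple.has(i)) {
           Writable child = tuple.get(i);   // the ith child; may itself be a TupleWritable
         }
       }
-->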
|
|
<!-- start class org.apache.hadoop.mapred.join.WrappedRecordReader -->
|
|
<class name="WrappedRecordReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.join.ComposableRecordReader<K, U>"/>
|
|
<method name="id" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="key" return="K extends org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the key at the head of this RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="key"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="qkey" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Clone the key at the head of this RR into the object supplied.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hasNext" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return true if the RR (including the k,v pair stored in this object)
|
|
is exhausted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="skip"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Skip key-value pairs with keys less than or equal to the key provided.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read the next k,v pair into the head of this object; return true iff
|
|
the RR and this are exhausted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="accept"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="org.apache.hadoop.mapred.join.CompositeRecordReader.JoinCollector"/>
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Add an iterator to the collector at the position occupied by this
|
|
RecordReader over the values in this stream paired with the key
|
|
provided (i.e., register a stream of values from this source matching K
|
|
with a collector).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="U extends org.apache.hadoop.io.Writable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write key-value pair at the head of this stream to the objects provided;
|
|
get next key-value pair from proxied RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createKey" return="K extends org.apache.hadoop.io.WritableComparable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Request new key from proxied RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createValue" return="U extends org.apache.hadoop.io.Writable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Request new value from proxied RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Request progress from proxied RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Request position from proxied RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Forward close request to proxied RR.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="org.apache.hadoop.mapred.join.ComposableRecordReader<K, ?>"/>
|
|
<doc>
|
|
<![CDATA[Implement Comparable contract (compare key at head of proxied RR
|
|
with that of another).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Return true iff compareTo(other) returns 0.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Proxy class for a RecordReader participating in the join framework.
|
|
This class keeps track of the "head" key-value pair for the
|
|
provided RecordReader and keeps a store of values matching a key when
|
|
this source is participating in a join.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.join.WrappedRecordReader -->
|
|
</package>
|
|
<package name="org.apache.hadoop.mapred.lib">
|
|
<!-- start class org.apache.hadoop.mapred.lib.ChainMapper -->
|
|
<class name="ChainMapper" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper"/>
|
|
<constructor name="ChainMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addMapper"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="klass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Mapper<K1, V1, K2, V2>>"/>
|
|
<param name="inputKeyClass" type="java.lang.Class<? extends K1>"/>
|
|
<param name="inputValueClass" type="java.lang.Class<? extends V1>"/>
|
|
<param name="outputKeyClass" type="java.lang.Class<? extends K2>"/>
|
|
<param name="outputValueClass" type="java.lang.Class<? extends V2>"/>
|
|
<param name="byValue" type="boolean"/>
|
|
<param name="mapperConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Adds a Mapper class to the chain job's JobConf.
|
|
<p/>
|
|
It has to be specified how key and values are passed from one element of
|
|
the chain to the next, by value or by reference. If a Mapper leverages the
|
|
assumed semantics that the key and values are not modified by the collector,
|
|
'by value' must be used. If the Mapper does not expect these semantics, as
|
|
an optimization to avoid serialization and deserialization 'by reference'
|
|
can be used.
|
|
<p/>
|
|
For the added Mapper the configuration given for it,
|
|
<code>mapperConf</code>, has precedence over the job's JobConf. This
|
|
precedence is in effect when the task is running.
|
|
<p/>
|
|
IMPORTANT: There is no need to specify the output key/value classes for the
|
|
ChainMapper; this is done by the addMapper for the last mapper in the chain.
|
|
<p/>
|
|
|
|
@param job job's JobConf to add the Mapper class.
|
|
@param klass the Mapper class to add.
|
|
@param inputKeyClass mapper input key class.
|
|
@param inputValueClass mapper input value class.
|
|
@param outputKeyClass mapper output key class.
|
|
@param outputValueClass mapper output value class.
|
|
@param byValue indicates if key/values should be passed by value
|
|
to the next Mapper in the chain, if any.
|
|
@param mapperConf a JobConf with the configuration for the Mapper
|
|
class. It is recommended to use a JobConf without default values using the
|
|
<code>JobConf(boolean loadDefaults)</code> constructor with FALSE.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Configures the ChainMapper and all the Mappers in the chain.
|
|
<p/>
|
|
If this method is overridden, <code>super.configure(...)</code> should be
|
|
invoked at the beginning of the overriding method.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<param name="value" type="java.lang.Object"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Chains the <code>map(...)</code> methods of the Mappers in the chain.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Closes the ChainMapper and all the Mappers in the chain.
|
|
<p/>
|
|
If this method is overridden, <code>super.close()</code> should be
|
|
invoked at the end of the overriding method.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The ChainMapper class allows the use of multiple Mapper classes within a single
|
|
Map task.
|
|
<p/>
|
|
The Mapper classes are invoked in a chained (or piped) fashion: the output of
|
|
the first becomes the input of the second, and so on until the last Mapper;
|
|
the output of the last Mapper will be written to the task's output.
|
|
<p/>
|
|
The key functionality of this feature is that the Mappers in the chain do not
|
|
need to be aware that they are executed in a chain. This enables having
|
|
reusable specialized Mappers that can be combined to perform composite
|
|
operations within a single task.
|
|
<p/>
|
|
Special care has to be taken when creating chains that the key/values output
|
|
by a Mapper are valid for the following Mapper in the chain. It is assumed
|
|
all Mappers and the Reducer in the chain use matching output and input key and
|
|
value classes as no conversion is done by the chaining code.
|
|
<p/>
|
|
Using the ChainMapper and the ChainReducer classes it is possible to compose
|
|
Map/Reduce jobs that look like <code>[MAP+ / REDUCE MAP*]</code>. An
|
|
immediate benefit of this pattern is a dramatic reduction in disk IO.
|
|
<p/>
|
|
IMPORTANT: There is no need to specify the output key/value classes for the
|
|
ChainMapper, this is done by the addMapper for the last mapper in the chain.
|
|
<p/>
|
|
ChainMapper usage pattern:
|
|
<p/>
|
|
<pre>
|
|
...
|
|
conf.setJobName("chain");
|
|
conf.setInputFormat(TextInputFormat.class);
|
|
conf.setOutputFormat(TextOutputFormat.class);
|
|
<p/>
|
|
JobConf mapAConf = new JobConf(false);
|
|
...
|
|
ChainMapper.addMapper(conf, AMap.class, LongWritable.class, Text.class,
|
|
Text.class, Text.class, true, mapAConf);
|
|
<p/>
|
|
JobConf mapBConf = new JobConf(false);
|
|
...
|
|
ChainMapper.addMapper(conf, BMap.class, Text.class, Text.class,
|
|
LongWritable.class, Text.class, false, mapBConf);
|
|
<p/>
|
|
JobConf reduceConf = new JobConf(false);
|
|
...
|
|
ChainReducer.setReducer(conf, XReduce.class, LongWritable.class, Text.class,
|
|
Text.class, Text.class, true, reduceConf);
|
|
<p/>
|
|
ChainReducer.addMapper(conf, CMap.class, Text.class, Text.class,
|
|
LongWritable.class, Text.class, false, null);
|
|
<p/>
|
|
ChainReducer.addMapper(conf, DMap.class, LongWritable.class, Text.class,
|
|
LongWritable.class, LongWritable.class, true, null);
|
|
<p/>
|
|
FileInputFormat.setInputPaths(conf, inDir);
|
|
FileOutputFormat.setOutputPath(conf, outDir);
|
|
...
|
|
<p/>
|
|
JobClient jc = new JobClient(conf);
|
|
RunningJob job = jc.submitJob(conf);
|
|
...
|
|
</pre>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.ChainMapper -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.ChainReducer -->
|
|
<class name="ChainReducer" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Reducer"/>
|
|
<constructor name="ChainReducer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setReducer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="klass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Reducer<K1, V1, K2, V2>>"/>
|
|
<param name="inputKeyClass" type="java.lang.Class<? extends K1>"/>
|
|
<param name="inputValueClass" type="java.lang.Class<? extends V1>"/>
|
|
<param name="outputKeyClass" type="java.lang.Class<? extends K2>"/>
|
|
<param name="outputValueClass" type="java.lang.Class<? extends V2>"/>
|
|
<param name="byValue" type="boolean"/>
|
|
<param name="reducerConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Sets the Reducer class to the chain job's JobConf.
|
|
<p/>
|
|
It has to be specified how key and values are passed from one element of
|
|
the chain to the next, by value or by reference. If a Reducer leverages the
|
|
assumed semantics that the key and values are not modified by the collector,
|
|
'by value' must be used. If the Reducer does not expect these semantics, as
|
|
an optimization to avoid serialization and deserialization 'by reference'
|
|
can be used.
|
|
<p/>
|
|
For the added Reducer the configuration given for it,
|
|
<code>reducerConf</code>, has precedence over the job's JobConf. This
|
|
precedence is in effect when the task is running.
|
|
<p/>
|
|
IMPORTANT: There is no need to specify the output key/value classes for the
|
|
ChainReducer, this is done by the setReducer or the addMapper for the last
|
|
element in the chain.
|
|
|
|
@param job job's JobConf to add the Reducer class.
|
|
@param klass the Reducer class to add.
|
|
@param inputKeyClass reducer input key class.
|
|
@param inputValueClass reducer input value class.
|
|
@param outputKeyClass reducer output key class.
|
|
@param outputValueClass reducer output value class.
|
|
@param byValue indicates if key/values should be passed by value
|
|
to the next Mapper in the chain, if any.
|
|
@param reducerConf a JobConf with the configuration for the Reducer
|
|
class. It is recommended to use a JobConf without default values using the
|
|
<code>JobConf(boolean loadDefaults)</code> constructor with FALSE.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addMapper"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="klass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Mapper<K1, V1, K2, V2>>"/>
|
|
<param name="inputKeyClass" type="java.lang.Class<? extends K1>"/>
|
|
<param name="inputValueClass" type="java.lang.Class<? extends V1>"/>
|
|
<param name="outputKeyClass" type="java.lang.Class<? extends K2>"/>
|
|
<param name="outputValueClass" type="java.lang.Class<? extends V2>"/>
|
|
<param name="byValue" type="boolean"/>
|
|
<param name="mapperConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Adds a Mapper class to the chain job's JobConf.
|
|
<p/>
|
|
It has to be specified how key and values are passed from one element of
|
|
the chain to the next, by value or by reference. If a Mapper leverages the
|
|
assumed semantics that the key and values are not modified by the collector,
|
|
'by value' must be used. If the Mapper does not expect these semantics, as
|
|
an optimization to avoid serialization and deserialization 'by reference'
|
|
can be used.
|
|
<p/>
|
|
For the added Mapper the configuration given for it,
|
|
<code>mapperConf</code>, has precedence over the job's JobConf. This
|
|
precedence is in effect when the task is running.
|
|
<p/>
|
|
IMPORTANT: There is no need to specify the output key/value classes for the
|
|
ChainMapper; this is done by the addMapper for the last mapper in the chain
|
|
.
|
|
|
|
@param job chain job's JobConf to add the Mapper class.
|
|
@param klass the Mapper class to add.
|
|
@param inputKeyClass mapper input key class.
|
|
@param inputValueClass mapper input value class.
|
|
@param outputKeyClass mapper output key class.
|
|
@param outputValueClass mapper output value class.
|
|
@param byValue indicates if key/values should be passed by value
|
|
to the next Mapper in the chain, if any.
|
|
@param mapperConf a JobConf with the configuration for the Mapper
|
|
class. It is recommended to use a JobConf without default values using the
|
|
<code>JobConf(boolean loadDefaults)</code> constructor with FALSE.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Configures the ChainReducer, the Reducer and all the Mappers in the chain.
|
|
<p/>
|
|
If this method is overridden, <code>super.configure(...)</code> should be
|
|
invoked at the beginning of the overriding method.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<param name="values" type="java.util.Iterator"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Chains the <code>reduce(...)</code> method of the Reducer with the
|
|
<code>map(...) </code> methods of the Mappers in the chain.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Closes the ChainReducer, the Reducer and all the Mappers in the chain.
|
|
<p/>
|
|
If this method is overridden, <code>super.close()</code> should be
|
|
invoked at the end of the overriding method.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The ChainReducer class allows the chaining of multiple Mapper classes after a
|
|
Reducer within the Reducer task.
|
|
<p/>
|
|
For each record output by the Reducer, the Mapper classes are invoked in a
|
|
chained (or piped) fashion: the output of the first becomes the input of the
|
|
second, and so on until the last Mapper; the output of the last Mapper will
|
|
be written to the task's output.
|
|
<p/>
|
|
The key functionality of this feature is that the Mappers in the chain do not
|
|
need to be aware that they are executed after the Reducer or in a chain.
|
|
This enables having reusable specialized Mappers that can be combined to
|
|
perform composite operations within a single task.
|
|
<p/>
|
|
Special care has to be taken when creating chains that the key/values output
|
|
by a Mapper are valid for the following Mapper in the chain. It is assumed
|
|
all Mappers and the Reducer in the chain use matching output and input key and
|
|
value classes as no conversion is done by the chaining code.
|
|
<p/>
|
|
Using the ChainMapper and the ChainReducer classes it is possible to compose
|
|
Map/Reduce jobs that look like <code>[MAP+ / REDUCE MAP*]</code>. An
|
|
immediate benefit of this pattern is a dramatic reduction in disk IO.
|
|
<p/>
|
|
IMPORTANT: There is no need to specify the output key/value classes for the
|
|
ChainReducer, this is done by the setReducer or the addMapper for the last
|
|
element in the chain.
|
|
<p/>
|
|
ChainReducer usage pattern:
|
|
<p/>
|
|
<pre>
|
|
...
|
|
conf.setJobName("chain");
|
|
conf.setInputFormat(TextInputFormat.class);
|
|
conf.setOutputFormat(TextOutputFormat.class);
|
|
<p/>
|
|
JobConf mapAConf = new JobConf(false);
|
|
...
|
|
ChainMapper.addMapper(conf, AMap.class, LongWritable.class, Text.class,
|
|
Text.class, Text.class, true, mapAConf);
|
|
<p/>
|
|
JobConf mapBConf = new JobConf(false);
|
|
...
|
|
ChainMapper.addMapper(conf, BMap.class, Text.class, Text.class,
|
|
LongWritable.class, Text.class, false, mapBConf);
|
|
<p/>
|
|
JobConf reduceConf = new JobConf(false);
|
|
...
|
|
ChainReducer.setReducer(conf, XReduce.class, LongWritable.class, Text.class,
|
|
Text.class, Text.class, true, reduceConf);
|
|
<p/>
|
|
ChainReducer.addMapper(conf, CMap.class, Text.class, Text.class,
|
|
LongWritable.class, Text.class, false, null);
|
|
<p/>
|
|
ChainReducer.addMapper(conf, DMap.class, LongWritable.class, Text.class,
|
|
LongWritable.class, LongWritable.class, true, null);
|
|
<p/>
|
|
FileInputFormat.setInputPaths(conf, inDir);
|
|
FileOutputFormat.setOutputPath(conf, outDir);
|
|
...
|
|
<p/>
|
|
JobClient jc = new JobClient(conf);
|
|
RunningJob job = jc.submitJob(conf);
|
|
...
|
|
</pre>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.ChainReducer -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.DelegatingInputFormat -->
|
|
<class name="DelegatingInputFormat" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputFormat<K, V>"/>
|
|
<constructor name="DelegatingInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getSplits" return="org.apache.hadoop.mapred.InputSplit[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="numSplits" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An {@link InputFormat} that delegates behaviour of paths to multiple other
|
|
InputFormats.
|
|
|
|
@see MultipleInputs#addInputPath(JobConf, Path, Class, Class)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.DelegatingInputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.DelegatingMapper -->
|
|
<class name="DelegatingMapper" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K1, V1, K2, V2>"/>
|
|
<constructor name="DelegatingMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K1"/>
|
|
<param name="value" type="V1"/>
|
|
<param name="outputCollector" type="org.apache.hadoop.mapred.OutputCollector<K2, V2>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Mapper} that delegates behaviour of paths to multiple other
|
|
mappers.
|
|
|
|
@see MultipleInputs#addInputPath(JobConf, Path, Class, Class)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.DelegatingMapper -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.FieldSelectionMapReduce -->
|
|
<class name="FieldSelectionMapReduce" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K, V, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<implements name="org.apache.hadoop.mapred.Reducer<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<constructor name="FieldSelectionMapReduce"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="val" type="V"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The identity function. Input key/value pair is written directly to output.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Text"/>
|
|
<param name="values" type="java.util.Iterator<org.apache.hadoop.io.Text>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This class implements a mapper/reducer class that can be used to perform
|
|
field selections in a manner similar to unix cut. The input data is treated
|
|
as fields separated by a user specified separator (the default value is
|
|
"\t"). The user can specify a list of fields that form the map output keys,
|
|
and a list of fields that form the map output values. If the inputformat is
|
|
TextInputFormat, the mapper will ignore the key to the map function, and the
|
|
fields are from the value only. Otherwise, the fields are the union of those
|
|
from the key and those from the value.
|
|
|
|
The field separator is under attribute "mapred.data.field.separator"
|
|
|
|
The map output field list spec is under attribute "map.output.key.value.fields.spec".
|
|
The value is expected to be like "keyFieldsSpec:valueFieldsSpec"
|
|
key/valueFieldsSpec are comma (,) separated field spec: fieldSpec,fieldSpec,fieldSpec ...
|
|
Each field spec can be a simple number (e.g. 5) specifying a specific field, or a range
|
|
(like 2-5) to specify a range of fields, or an open range (like 3-) specifying all
|
|
the fields starting from field 3. The open range field spec applies to value fields only.
|
|
They have no effect on the key fields.
|
|
|
|
Here is an example: "4,3,0,1:6,5,1-3,7-". It specifies to use fields 4,3,0 and 1 for keys,
|
|
and use fields 6,5,1,2,3,7 and above for values.
|
|
|
|
The reduce output field list spec is under attribute "reduce.output.key.value.fields.spec".
|
|
|
|
The reducer extracts output key/value pairs in a similar manner, except that
|
|
the key is never ignored.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.FieldSelectionMapReduce -->
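<!-- Illustrative configuration sketch for FieldSelectionMapReduce, using the attribute names
     documented above; the separator and field specs are example values, not defaults.

       import org.apache.hadoop.mapred.JobConf;
       import org.apache.hadoop.mapred.lib.FieldSelectionMapReduce;

       JobConf conf = new JobConf();
       conf.setMapperClass(FieldSelectionMapReduce.class);
       conf.setReducerClass(FieldSelectionMapReduce.class);
       conf.set("mapred.data.field.separator", ",");
       // keys from fields 4,3,0,1; values from fields 6,5,1 through 3, and 7 onward
       conf.set("map.output.key.value.fields.spec", "4,3,0,1:6,5,1-3,7-");
       // reduce output: key from field 0, values from field 1 onward
       conf.set("reduce.output.key.value.fields.spec", "0:1-");
-->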
|
|
<!-- start class org.apache.hadoop.mapred.lib.HashPartitioner -->
|
|
<class name="HashPartitioner" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Partitioner<K2, V2>"/>
|
|
<constructor name="HashPartitioner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="getPartition" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K2"/>
|
|
<param name="value" type="V2"/>
|
|
<param name="numReduceTasks" type="int"/>
|
|
<doc>
|
|
<![CDATA[Use {@link Object#hashCode()} to partition.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Partition keys by their {@link Object#hashCode()}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.HashPartitioner -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.IdentityMapper -->
|
|
<class name="IdentityMapper" extends="org.apache.hadoop.mapred.MapReduceBase"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K, V, K, V>"/>
|
|
<constructor name="IdentityMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="val" type="V"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K, V>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The identity function. Input key/value pair is written directly to
|
|
output.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Implements the identity function, mapping inputs directly to outputs.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.IdentityMapper -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.IdentityReducer -->
|
|
<class name="IdentityReducer" extends="org.apache.hadoop.mapred.MapReduceBase"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Reducer<K, V, K, V>"/>
|
|
<constructor name="IdentityReducer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="values" type="java.util.Iterator<V>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K, V>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes all keys and values directly to output.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Performs no reduction, writing all input values directly to the output.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.IdentityReducer -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.InputSampler -->
|
|
<class name="InputSampler" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="InputSampler" type="org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="writePartitionFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="sampler" type="org.apache.hadoop.mapred.lib.InputSampler.Sampler<K, V>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a partition file for the given job, using the Sampler provided.
|
|
Queries the sampler for a sample keyset, sorts by the output key
|
|
comparator, selects the keys for each rank, and writes to the destination
|
|
returned from {@link
|
|
org.apache.hadoop.mapred.lib.TotalOrderPartitioner#getPartitionFile}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[Driver for InputSampler from the command line.
|
|
Configures a JobConf instance and calls {@link #writePartitionFile}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Utility for collecting samples and writing a partition file for
|
|
{@link org.apache.hadoop.mapred.lib.TotalOrderPartitioner}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.InputSampler -->
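<!-- Illustrative sketch of writing a partition file with a RandomSampler for use with
     TotalOrderPartitioner; the sampler parameters are arbitrary example values and the job
     is assumed to already have its input format and paths configured.

       import org.apache.hadoop.io.Text;
       import org.apache.hadoop.mapred.JobConf;
       import org.apache.hadoop.mapred.lib.InputSampler;

       JobConf job = new JobConf();
       // choose keys with probability 0.1, up to 10000 samples, from at most 10 splits
       InputSampler.Sampler<Text, Text> sampler =
           new InputSampler.RandomSampler<Text, Text>(0.1, 10000, 10);
       InputSampler.writePartitionFile(job, sampler);
-->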
|
|
<!-- start class org.apache.hadoop.mapred.lib.InputSampler.IntervalSampler -->
|
|
<class name="InputSampler.IntervalSampler" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.InputSampler.Sampler<K, V>"/>
|
|
<constructor name="InputSampler.IntervalSampler" type="double"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new IntervalSampler sampling <em>all</em> splits.
|
|
@param freq The frequency with which records will be emitted.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="InputSampler.IntervalSampler" type="double, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new IntervalSampler.
|
|
@param freq The frequency with which records will be emitted.
|
|
@param maxSplitsSampled The maximum number of splits to examine.
|
|
@see #getSample]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getSample" return="K[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inf" type="org.apache.hadoop.mapred.InputFormat<K, V>"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[For each split sampled, emit when the ratio of the number of records
|
|
retained to the total record count is less than the specified
|
|
frequency.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Sample from s splits at regular intervals.
|
|
Useful for sorted data.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.InputSampler.IntervalSampler -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.InputSampler.RandomSampler -->
|
|
<class name="InputSampler.RandomSampler" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.InputSampler.Sampler<K, V>"/>
|
|
<constructor name="InputSampler.RandomSampler" type="double, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new RandomSampler sampling <em>all</em> splits.
|
|
This will read every split at the client, which is very expensive.
|
|
@param freq Probability with which a key will be chosen.
|
|
@param numSamples Total number of samples to obtain from all selected
|
|
splits.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="InputSampler.RandomSampler" type="double, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new RandomSampler.
|
|
@param freq Probability with which a key will be chosen.
|
|
@param numSamples Total number of samples to obtain from all selected
|
|
splits.
|
|
@param maxSplitsSampled The maximum number of splits to examine.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getSample" return="K[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inf" type="org.apache.hadoop.mapred.InputFormat<K, V>"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Randomize the split order, then take the specified number of keys from
|
|
each split sampled, where each key is selected with the specified
|
|
probability and possibly replaced by a subsequently selected key when
|
|
the quota of keys from that split is satisfied.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Sample from random points in the input.
|
|
General-purpose sampler. Takes numSamples / maxSplitsSampled inputs from
|
|
each split.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.InputSampler.RandomSampler -->
|
|
<!-- start interface org.apache.hadoop.mapred.lib.InputSampler.Sampler -->
|
|
<interface name="InputSampler.Sampler" abstract="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getSample" return="K[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inf" type="org.apache.hadoop.mapred.InputFormat<K, V>"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[For a given job, collect and return a subset of the keys from the
|
|
input data.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Interface to sample using an {@link org.apache.hadoop.mapred.InputFormat}.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.lib.InputSampler.Sampler -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.InputSampler.SplitSampler -->
|
|
<class name="InputSampler.SplitSampler" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.InputSampler.Sampler<K, V>"/>
|
|
<constructor name="InputSampler.SplitSampler" type="int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a SplitSampler sampling <em>all</em> splits.
|
|
Takes the first numSamples / numSplits records from each split.
|
|
@param numSamples Total number of samples to obtain from all selected
|
|
splits.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="InputSampler.SplitSampler" type="int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a new SplitSampler.
|
|
@param numSamples Total number of samples to obtain from all selected
|
|
splits.
|
|
@param maxSplitsSampled The maximum number of splits to examine.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getSample" return="K[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inf" type="org.apache.hadoop.mapred.InputFormat<K, V>"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[From each split sampled, take the first numSamples / numSplits records.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Samples the first n records from s splits.
|
|
Inexpensive way to sample random data.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.InputSampler.SplitSampler -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.InverseMapper -->
|
|
<class name="InverseMapper" extends="org.apache.hadoop.mapred.MapReduceBase"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K, V, V, K>"/>
|
|
<constructor name="InverseMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<V, K>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[The inverse function. Input keys and values are swapped.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Mapper} that swaps keys and values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.InverseMapper -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.KeyFieldBasedComparator -->
|
|
<class name="KeyFieldBasedComparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<constructor name="KeyFieldBasedComparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This comparator implementation provides a subset of the features provided
|
|
by the Unix/GNU Sort. In particular, the supported features are:
|
|
-n, (Sort numerically)
|
|
-r, (Reverse the result of comparison)
|
|
-k pos1[,pos2], where pos is of the form f[.c][opts], where f is the number
|
|
of the field to use, and c is the number of the first character from the
|
|
beginning of the field. Fields and character positions are numbered starting
|
|
with 1; a character position of zero in pos2 indicates the field's last
|
|
character. If '.c' is omitted from pos1, it defaults to 1 (the beginning
|
|
of the field); if omitted from pos2, it defaults to 0 (the end of the
|
|
field). opts are ordering options (any of 'nr' as described above).
|
|
We assume that the fields in the key are separated by
|
|
map.output.key.field.separator.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.KeyFieldBasedComparator -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner -->
|
|
<class name="KeyFieldBasedPartitioner" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Partitioner<K2, V2>"/>
|
|
<constructor name="KeyFieldBasedPartitioner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="getPartition" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K2"/>
|
|
<param name="value" type="V2"/>
|
|
<param name="numReduceTasks" type="int"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="end" type="int"/>
|
|
<param name="currentHash" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Defines a way to partition keys based on certain key fields (also see
|
|
{@link KeyFieldBasedComparator}).
|
|
The key specification supported is of the form -k pos1[,pos2], where,
|
|
pos is of the form f[.c][opts], where f is the number
|
|
of the key field to use, and c is the number of the first character from
|
|
the beginning of the field. Fields and character positions are numbered
|
|
starting with 1; a character position of zero in pos2 indicates the
|
|
field's last character. If '.c' is omitted from pos1, it defaults to 1
|
|
(the beginning of the field); if omitted from pos2, it defaults to 0
|
|
(the end of the field).]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner -->
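<!-- Illustrative sketch wiring the key-field comparator and partitioner into a job. The
     separator property name is taken from the comparator documentation above; the sort and
     partition option properties themselves are not shown because their names are not listed
     in this record.

       import org.apache.hadoop.mapred.JobConf;
       import org.apache.hadoop.mapred.lib.KeyFieldBasedComparator;
       import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;

       JobConf conf = new JobConf();
       conf.set("map.output.key.field.separator", ".");
       conf.setOutputKeyComparatorClass(KeyFieldBasedComparator.class);
       conf.setPartitionerClass(KeyFieldBasedPartitioner.class);
-->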
|
|
<!-- start class org.apache.hadoop.mapred.lib.LongSumReducer -->
|
|
<class name="LongSumReducer" extends="org.apache.hadoop.mapred.MapReduceBase"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Reducer<K, org.apache.hadoop.io.LongWritable, K, org.apache.hadoop.io.LongWritable>"/>
|
|
<constructor name="LongSumReducer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="values" type="java.util.Iterator<org.apache.hadoop.io.LongWritable>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K, org.apache.hadoop.io.LongWritable>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Reducer} that sums long values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.LongSumReducer -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.MultipleInputs -->
|
|
<class name="MultipleInputs" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MultipleInputs"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="addInputPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="inputFormatClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.InputFormat>"/>
|
|
<doc>
|
|
<![CDATA[Add a {@link Path} with a custom {@link InputFormat} to the list of
|
|
inputs for the map-reduce job.
|
|
|
|
@param conf The configuration of the job
|
|
@param path {@link Path} to be added to the list of inputs for the job
|
|
@param inputFormatClass {@link InputFormat} class to use for this path]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addInputPath"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="path" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="inputFormatClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.InputFormat>"/>
|
|
<param name="mapperClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.Mapper>"/>
|
|
<doc>
|
|
<![CDATA[Add a {@link Path} with a custom {@link InputFormat} and
|
|
{@link Mapper} to the list of inputs for the map-reduce job.
|
|
|
|
@param conf The configuration of the job
|
|
@param path {@link Path} to be added to the list of inputs for the job
|
|
@param inputFormatClass {@link InputFormat} class to use for this path
|
|
@param mapperClass {@link Mapper} class to use for this path]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class supports MapReduce jobs that have multiple input paths with
|
|
a different {@link InputFormat} and {@link Mapper} for each path.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.MultipleInputs -->
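<!-- Illustrative sketch combining two input paths with different formats and mappers via the
     addInputPath overloads listed above; the paths and the AMapper/BMapper classes are
     hypothetical.

       import org.apache.hadoop.fs.Path;
       import org.apache.hadoop.mapred.JobConf;
       import org.apache.hadoop.mapred.KeyValueTextInputFormat;
       import org.apache.hadoop.mapred.TextInputFormat;
       import org.apache.hadoop.mapred.lib.MultipleInputs;

       JobConf conf = new JobConf();
       MultipleInputs.addInputPath(conf, new Path("/data/a"),
           TextInputFormat.class, AMapper.class);
       MultipleInputs.addInputPath(conf, new Path("/data/b"),
           KeyValueTextInputFormat.class, BMapper.class);
-->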
|
|
<!-- start class org.apache.hadoop.mapred.lib.MultipleOutputFormat -->
|
|
<class name="MultipleOutputFormat" extends="org.apache.hadoop.mapred.FileOutputFormat<K, V>"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MultipleOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="arg3" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a composite record writer that can write key/value data to different
|
|
output files
|
|
|
|
@param fs
|
|
the file system to use
|
|
@param job
|
|
the job conf for the job
|
|
@param name
|
|
the leaf file name for the output file (such as part-00000)
|
|
@param arg3
|
|
a progressable for reporting progress.
|
|
@return a composite record writer
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="generateLeafFileName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Generate the leaf name for the output file name. The default behavior does
|
|
not change the leaf file name (such as part-00000)
|
|
|
|
@param name
|
|
the leaf file name for the output file
|
|
@return the given leaf file name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="generateFileNameForKeyValue" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Generate the output file name based on the given key and the leaf file
|
|
name. The default behavior is that the file name does not depend on the
|
|
key.
|
|
|
|
@param key
|
|
the key of the output data
|
|
@param name
|
|
the leaf file name
|
|
@return generated file name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="generateActualKey" return="K"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<doc>
|
|
<![CDATA[Generate the actual key from the given key/value. The default behavior is that
|
|
the actual key is equal to the given key
|
|
|
|
@param key
|
|
the key of the output data
|
|
@param value
|
|
the value of the output data
|
|
@return the actual key derived from the given key/value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="generateActualValue" return="V"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="V"/>
|
|
<doc>
|
|
<![CDATA[Generate the actual value from the given key and value. The default behavior is that
|
|
the actual value is equal to the given value
|
|
|
|
@param key
|
|
the key of the output data
|
|
@param value
|
|
the value of the output data
|
|
@return the actual value derived from the given key/value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInputFileBasedOutputFileName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Generate the output file name based on a given name and the input file name. If
|
|
the map input file does not exist (i.e. this is not for a map only job),
|
|
the given name is returned unchanged. If the config value for
|
|
"num.of.trailing.legs.to.use" is not set, or set 0 or negative, the given
|
|
name is returned unchanged. Otherwise, return a file name consisting of the
|
|
N trailing legs of the input file name where N is the config value for
|
|
"num.of.trailing.legs.to.use".
|
|
|
|
@param job
|
|
the job config
|
|
@param name
|
|
the output file name
|
|
@return the output file name based on the given name and the input file name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getBaseRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="arg3" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@param fs
|
|
the file system to use
|
|
@param job
|
|
a job conf object
|
|
@param name
|
|
the name of the file over which a record writer object will be
|
|
constructed
|
|
@param arg3
|
|
a progressable object
|
|
@return A RecordWriter object over the given file
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This abstract class extends FileOutputFormat, allowing the job to write its
|
|
output data to different output files. There are three basic use cases for
|
|
this class.
|
|
|
|
Case one: This class is used for a map reduce job with at least one reducer.
|
|
The reducer wants to write data to different files depending on the actual
|
|
keys. It is assumed that a key (or value) encodes the actual key (value)
|
|
and the desired location for the actual key (value).
|
|
|
|
Case two: This class is used for a map only job. The job wants to use an
|
|
output file name that is either a part of the input file name of the input
|
|
data, or some derivation of it.
|
|
|
|
Case three: This class is used for a map only job. The job wants to use an
|
|
output file name that depends on both the keys and the input file name.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.MultipleOutputFormat -->
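 <!-- Illustrative sketch for MultipleOutputFormat (not generated by JDiff): a subclass of
      MultipleTextOutputFormat that routes each record to a file named after its key
      (case one above). The class name KeyBasedOutput is a hypothetical placeholder.

        public static class KeyBasedOutput extends MultipleTextOutputFormat<Text, Text> {
          @Override
          protected String generateFileNameForKeyValue(Text key, Text value, String name) {
            // Prefix the default leaf name (e.g. part-00000) with the key.
            return key.toString() + "/" + name;
          }
        }

        // In the job setup:
        conf.setOutputFormat(KeyBasedOutput.class); -->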
|
|
<!-- start class org.apache.hadoop.mapred.lib.MultipleOutputs -->
|
|
<class name="MultipleOutputs" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MultipleOutputs" type="org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates and initializes multiple named outputs support, it should be
|
|
instantiated in the Mapper/Reducer configure method.
|
|
|
|
@param job the job configuration object]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getNamedOutputsList" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Returns list of channel names.
|
|
|
|
@param conf job conf
|
|
@return List of channel Names]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isMultiNamedOutput" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns if a named output is multiple.
|
|
|
|
@param conf job conf
|
|
@param namedOutput named output
|
|
@return <code>true</code> if the named output is multi, <code>false</code>
|
|
if it is single. If the named output is not defined it returns
|
|
<code>false</code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNamedOutputFormatClass" return="java.lang.Class<? extends org.apache.hadoop.mapred.OutputFormat>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns the named output OutputFormat.
|
|
|
|
@param conf job conf
|
|
@param namedOutput named output
|
|
@return namedOutput OutputFormat]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNamedOutputKeyClass" return="java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns the key class for a named output.
|
|
|
|
@param conf job conf
|
|
@param namedOutput named output
|
|
@return class for the named output key]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNamedOutputValueClass" return="java.lang.Class<? extends org.apache.hadoop.io.Writable>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns the value class for a named output.
|
|
|
|
@param conf job conf
|
|
@param namedOutput named output
|
|
@return class of named output value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addNamedOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<param name="outputFormatClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.OutputFormat>"/>
|
|
<param name="keyClass" type="java.lang.Class<?>"/>
|
|
<param name="valueClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Adds a named output for the job.
|
|
<p/>
|
|
|
|
@param conf job conf to add the named output
|
|
@param namedOutput named output name; it must be a single word, letters
|
|
and numbers only, and cannot be the word 'part' as
|
|
that is reserved for the
|
|
default output.
|
|
@param outputFormatClass OutputFormat class.
|
|
@param keyClass key class
|
|
@param valueClass value class]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addMultiNamedOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<param name="outputFormatClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.OutputFormat>"/>
|
|
<param name="keyClass" type="java.lang.Class<?>"/>
|
|
<param name="valueClass" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Adds a multi named output for the job.
|
|
<p/>
|
|
|
|
@param conf job conf to add the named output
|
|
@param namedOutput named output name; it must be a single word, letters
|
|
and numbers only, and cannot be the word 'part' as
|
|
that is reserved for the
|
|
default output.
|
|
@param outputFormatClass OutputFormat class.
|
|
@param keyClass key class
|
|
@param valueClass value class]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCountersEnabled"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="enabled" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Enables or disables counters for the named outputs.
|
|
<p/>
|
|
By default these counters are disabled.
|
|
<p/>
|
|
MultipleOutputs supports counters; by default they are disabled.
|
|
The counters group is the {@link MultipleOutputs} class name.
|
|
</p>
|
|
The names of the counters are the same as the named outputs. For multi
|
|
named outputs the name of the counter is the concatenation of the named
|
|
output, an underscore '_', and the multiname.
|
|
|
|
@param conf job conf in which to enable or disable the counters.
|
|
@param enabled indicates if the counters will be enabled or not.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCountersEnabled" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Returns if the counters for the named outputs are enabled or not.
|
|
<p/>
|
|
By default these counters are disabled.
|
|
<p/>
|
|
MultipleOutputs supports counters; by default they are disabled.
|
|
The counters group is the {@link MultipleOutputs} class name.
|
|
</p>
|
|
The names of the counters are the same as the named outputs. For multi
|
|
named outputs the name of the counter is the concatenation of the named
|
|
output, an underscore '_', and the multiname.
|
|
|
|
|
|
@param conf job conf in which to enable or disable the counters.
|
|
@return TRUE if the counters are enabled, FALSE if they are disabled.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNamedOutputs" return="java.util.Iterator<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns an iterator with the defined named outputs.
|
|
|
|
@return iterator with the defined named outputs]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCollector" return="org.apache.hadoop.mapred.OutputCollector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Gets the output collector for a named output.
|
|
<p/>
|
|
|
|
@param namedOutput the named output name
|
|
@param reporter the reporter
|
|
@return the output collector for the given named output
|
|
@throws IOException thrown if output collector could not be created]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCollector" return="org.apache.hadoop.mapred.OutputCollector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="namedOutput" type="java.lang.String"/>
|
|
<param name="multiName" type="java.lang.String"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Gets the output collector for a multi named output.
|
|
<p/>
|
|
|
|
@param namedOutput the named output name
|
|
@param multiName the multi name part
|
|
@param reporter the reporter
|
|
@return the output collector for the given named output
|
|
@throws IOException thrown if output collector could not be created]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Closes all the opened named outputs.
|
|
<p/>
|
|
If overridden, subclasses must invoke <code>super.close()</code> at the
|
|
end of their <code>close()</code>
|
|
|
|
@throws java.io.IOException thrown if any of the MultipleOutput files
|
|
could not be closed properly.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The MultipleOutputs class simplifies writing to additional outputs other
|
|
than the job default output via the <code>OutputCollector</code> passed to
|
|
the <code>map()</code> and <code>reduce()</code> methods of the
|
|
<code>Mapper</code> and <code>Reducer</code> implementations.
|
|
<p/>
|
|
Each additional output, or named output, may be configured with its own
|
|
<code>OutputFormat</code>, with its own key class and with its own value
|
|
class.
|
|
<p/>
|
|
A named output can be a single file or a multi file. The latter is referred to as
|
|
a multi named output.
|
|
<p/>
|
|
A multi named output is an unbounded set of files all sharing the same
|
|
<code>OutputFormat</code>, key class and value class configuration.
|
|
<p/>
|
|
When named outputs are used within a <code>Mapper</code> implementation,
|
|
key/values written to a name output are not part of the reduce phase, only
|
|
key/values written to the job <code>OutputCollector</code> are part of the
|
|
reduce phase.
|
|
<p/>
|
|
MultipleOutputs supports counters; by default they are disabled. The counters
|
|
group is the {@link MultipleOutputs} class name.
|
|
</p>
|
|
The names of the counters are the same as the named outputs. For multi
|
|
named outputs the name of the counter is the concatenation of the named
|
|
output, an underscore '_', and the multiname.
|
|
<p/>
|
|
Job configuration usage pattern is:
|
|
<pre>
|
|
|
|
JobConf conf = new JobConf();
|
|
|
|
conf.setInputPath(inDir);
|
|
FileOutputFormat.setOutputPath(conf, outDir);
|
|
|
|
conf.setMapperClass(MOMap.class);
|
|
conf.setReducerClass(MOReduce.class);
|
|
...
|
|
|
|
// Defines additional single text based output 'text' for the job
|
|
MultipleOutputs.addNamedOutput(conf, "text", TextOutputFormat.class,
|
|
LongWritable.class, Text.class);
|
|
|
|
// Defines additional multi sequencefile based output 'sequence' for the
|
|
// job
|
|
MultipleOutputs.addMultiNamedOutput(conf, "seq",
|
|
SequenceFileOutputFormat.class,
|
|
LongWritable.class, Text.class);
|
|
...
|
|
|
|
JobClient jc = new JobClient();
|
|
RunningJob job = jc.submitJob(conf);
|
|
|
|
...
|
|
</pre>
|
|
<p/>
|
|
Usage pattern in the Reducer implementation is:
|
|
<pre>
|
|
|
|
public class MOReduce implements
|
|
Reducer<WritableComparable, Writable, WritableComparable, Writable> {
|
|
private MultipleOutputs mos;
|
|
|
|
public void configure(JobConf conf) {
|
|
...
|
|
mos = new MultipleOutputs(conf);
|
|
}
|
|
|
|
public void reduce(WritableComparable key, Iterator<Writable> values,
|
|
OutputCollector output, Reporter reporter)
|
|
throws IOException {
|
|
...
|
|
mos.getCollector("text", reporter).collect(key, new Text("Hello"));
|
|
mos.getCollector("seq", "A", reporter).collect(key, new Text("Bye"));
|
|
mos.getCollector("seq", "B", reporter).collect(key, new Text("Chau"));
|
|
...
|
|
}
|
|
|
|
public void close() throws IOException {
|
|
mos.close();
|
|
...
|
|
}
|
|
|
|
}
|
|
</pre>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.MultipleOutputs -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat -->
|
|
<class name="MultipleSequenceFileOutputFormat" extends="org.apache.hadoop.mapred.lib.MultipleOutputFormat<K, V>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MultipleSequenceFileOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getBaseRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="arg3" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class extends MultipleOutputFormat, allowing the output data to be written
|
|
to different output files in sequence file output format.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.MultipleTextOutputFormat -->
|
|
<class name="MultipleTextOutputFormat" extends="org.apache.hadoop.mapred.lib.MultipleOutputFormat<K, V>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MultipleTextOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getBaseRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="fs" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="arg3" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class extends MultipleOutputFormat, allowing the output
|
|
data to be written to different output files in Text output format.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.MultipleTextOutputFormat -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.MultithreadedMapRunner -->
|
|
<class name="MultithreadedMapRunner" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.MapRunnable<K1, V1, K2, V2>"/>
|
|
<constructor name="MultithreadedMapRunner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jobConf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="org.apache.hadoop.mapred.RecordReader<K1, V1>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<K2, V2>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Multithreaded implementation of {@link org.apache.hadoop.mapred.MapRunnable}.
|
|
<p>
|
|
It can be used instead of the default implementation,
|
|
{@link org.apache.hadoop.mapred.MapRunner}, when the Map operation is not CPU
|
|
bound in order to improve throughput.
|
|
<p>
|
|
Map implementations using this MapRunnable must be thread-safe.
|
|
<p>
|
|
The Map-Reduce job has to be configured to use this MapRunnable class (using
|
|
the JobConf.setMapRunnerClass method) and
|
|
the number of threads the thread-pool can use with the
|
|
<code>mapred.map.multithreadedrunner.threads</code> property; its default
|
|
value is 10 threads.
|
|
<p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.MultithreadedMapRunner -->
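 <!-- Illustrative configuration sketch for MultithreadedMapRunner (not generated by JDiff;
      MyThreadSafeMapper is a hypothetical placeholder and must be thread-safe):

        JobConf conf = new JobConf();
        conf.setMapperClass(MyThreadSafeMapper.class);
        conf.setMapRunnerClass(MultithreadedMapRunner.class);
        // Override the default pool size of 10 threads.
        conf.setInt("mapred.map.multithreadedrunner.threads", 20); -->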
|
|
<!-- start class org.apache.hadoop.mapred.lib.NLineInputFormat -->
|
|
<class name="NLineInputFormat" extends="org.apache.hadoop.mapred.FileInputFormat<org.apache.hadoop.io.LongWritable, org.apache.hadoop.io.Text>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<constructor name="NLineInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.LongWritable, org.apache.hadoop.io.Text>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="genericSplit" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getSplits" return="org.apache.hadoop.mapred.InputSplit[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="numSplits" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Logically splits the set of input files for the job, splits N lines
|
|
of the input as one split.
|
|
|
|
@see org.apache.hadoop.mapred.FileInputFormat#getSplits(JobConf, int)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[NLineInputFormat which splits N lines of input as one split.
|
|
|
|
In many "pleasantly" parallel applications, each process/mapper
|
|
processes the same input file(s), but the computations are
|
|
controlled by different parameters.(Referred to as "parameter sweeps").
|
|
One way to achieve this is to specify a set of parameters
|
|
(one set per line) as input in a control file
|
|
(which is the input path to the map-reduce application,
|
|
whereas the input dataset is specified
|
|
via a config variable in JobConf.).
|
|
|
|
NLineInputFormat can be used in such applications: it splits
|
|
the input file such that by default, one line is fed as
|
|
a value to one map task, and the key is the offset.
|
|
i.e. (k,v) is (LongWritable, Text).
|
|
The location hints will span the whole mapred cluster.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.NLineInputFormat -->
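 <!-- Illustrative configuration sketch for NLineInputFormat (not generated by JDiff).
      The property name "mapred.line.input.format.linespermap" and the control file path
      are assumptions used only for illustration:

        JobConf conf = new JobConf();
        conf.setInputFormat(NLineInputFormat.class);
        // Control file: one parameter set per line; by default each line feeds one map task.
        FileInputFormat.addInputPath(conf, new Path("/control/params.txt"));
        // Optionally feed N lines (here 5) to each map task instead of one.
        conf.setInt("mapred.line.input.format.linespermap", 5); -->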
|
|
<!-- start class org.apache.hadoop.mapred.lib.NullOutputFormat -->
|
|
<class name="NullOutputFormat" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.OutputFormat<K, V>"/>
|
|
<constructor name="NullOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
</method>
|
|
<method name="checkOutputSpecs"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ignored" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Consume all outputs and put them in /dev/null.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.NullOutputFormat -->
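 <!-- Illustrative one-liner for NullOutputFormat (not generated by JDiff): useful for jobs
      that produce all of their results as side effects and need no job output.

        conf.setOutputFormat(NullOutputFormat.class); -->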
|
|
<!-- start class org.apache.hadoop.mapred.lib.RegexMapper -->
|
|
<class name="RegexMapper" extends="org.apache.hadoop.mapred.MapReduceBase"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, org.apache.hadoop.io.LongWritable>"/>
|
|
<constructor name="RegexMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="org.apache.hadoop.io.Text"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.LongWritable>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Mapper} that extracts text matching a regular expression.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.RegexMapper -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.TokenCountMapper -->
|
|
<class name="TokenCountMapper" extends="org.apache.hadoop.mapred.MapReduceBase"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, org.apache.hadoop.io.LongWritable>"/>
|
|
<constructor name="TokenCountMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K"/>
|
|
<param name="value" type="org.apache.hadoop.io.Text"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.LongWritable>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Mapper} that maps text values into <token,freq> pairs. Uses
|
|
{@link StringTokenizer} to break text into tokens.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.TokenCountMapper -->
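 <!-- Illustrative pairing sketch for TokenCountMapper (not generated by JDiff): a
      word-count style job combining TokenCountMapper with LongSumReducer from the same
      lib package.

        JobConf conf = new JobConf();
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(LongWritable.class);
        conf.setMapperClass(TokenCountMapper.class);
        conf.setCombinerClass(LongSumReducer.class);
        conf.setReducerClass(LongSumReducer.class); -->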
|
|
<!-- start class org.apache.hadoop.mapred.lib.TotalOrderPartitioner -->
|
|
<class name="TotalOrderPartitioner" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Partitioner<K, V>"/>
|
|
<constructor name="TotalOrderPartitioner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Read in the partition file and build indexing data structures.
|
|
If the keytype is {@link org.apache.hadoop.io.BinaryComparable} and
|
|
<tt>total.order.partitioner.natural.order</tt> is not false, a trie
|
|
of the first <tt>total.order.partitioner.max.trie.depth</tt>(2) + 1 bytes
|
|
will be built. Otherwise, keys will be located using a binary search of
|
|
the partition keyset using the {@link org.apache.hadoop.io.RawComparator}
|
|
defined for this job. The input file must be sorted with the same
|
|
comparator and contain {@link
|
|
org.apache.hadoop.mapred.JobConf#getNumReduceTasks} - 1 keys.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPartition" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="V"/>
|
|
<param name="numPartitions" type="int"/>
|
|
</method>
|
|
<method name="setPartitionFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="p" type="org.apache.hadoop.fs.Path"/>
|
|
<doc>
|
|
<![CDATA[Set the path to the SequenceFile storing the sorted partition keyset.
|
|
It must be the case that for <tt>R</tt> reduces, there are <tt>R-1</tt>
|
|
keys in the SequenceFile.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPartitionFile" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the path to the SequenceFile storing the sorted partition keyset.
|
|
@see #setPartitionFile(JobConf,Path)]]>
|
|
</doc>
|
|
</method>
|
|
<field name="DEFAULT_PATH" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Partitioner effecting a total order by reading split points from
|
|
an externally generated source.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.TotalOrderPartitioner -->
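 <!-- Illustrative configuration sketch for TotalOrderPartitioner (not generated by JDiff).
      The partition file path is a placeholder; the SequenceFile must already contain the
      R-1 sorted split keys for R reduces, written with the job's comparator.

        JobConf conf = new JobConf();
        conf.setNumReduceTasks(4);
        conf.setPartitionerClass(TotalOrderPartitioner.class);
        // Point the partitioner at the SequenceFile holding the 3 split keys.
        TotalOrderPartitioner.setPartitionFile(conf, new Path("/tmp/partitions.seq")); -->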
|
|
</package>
|
|
<package name="org.apache.hadoop.mapred.lib.aggregate">
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.DoubleValueSum -->
|
|
<class name="DoubleValueSum" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="DoubleValueSum"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
an object whose string representation represents a double value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="double"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
a double value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSum" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of one element. The element is a string
|
|
representation of the aggregated value. The return value is
|
|
expected to be used by a combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that sums up a sequence of double
|
|
values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.DoubleValueSum -->
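 <!-- Illustrative stand-alone use of DoubleValueSum (not generated by JDiff); the aggregate
      framework normally drives these calls, but the aggregator can also be used directly:

        DoubleValueSum sum = new DoubleValueSum();
        sum.addNextValue(1.5);           // double overload
        sum.addNextValue("2.25");        // string representation of a double
        double total = sum.getSum();     // 3.75
        String report = sum.getReport(); // string form of the aggregated value
        sum.reset(); -->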
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.LongValueMax -->
|
|
<class name="LongValueMax" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="LongValueMax"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[the default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
an object whose string representation represents a long value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newVal" type="long"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param newVal
|
|
a long value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getVal" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of one element. The element is a string
|
|
representation of the aggregated value. The return value is
|
|
expected to be used by a combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that maintains the maximum of
|
|
a sequence of long values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.LongValueMax -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.LongValueMin -->
|
|
<class name="LongValueMin" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="LongValueMin"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[the default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
an object whose string representation represents a long value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newVal" type="long"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param newVal
|
|
a long value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getVal" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of one element. The element is a string
|
|
representation of the aggregated value. The return value is
|
|
expected to be used by a combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that maintains the minimum of
|
|
a sequence of long values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.LongValueMin -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.LongValueSum -->
|
|
<class name="LongValueSum" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="LongValueSum"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[the default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
an object whose string representation represents a long value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="long"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
a long value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSum" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of one element. The element is a string
|
|
representation of the aggregated value. The return value is
|
|
expected to be used by a combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that sums up
|
|
a sequence of long values.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.LongValueSum -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.StringValueMax -->
|
|
<class name="StringValueMax" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="StringValueMax"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[the default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
a string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getVal" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of one element. The element is a string
|
|
representation of the aggregated value. The return value is
|
|
expected to be used by a combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that maintains the biggest of
|
|
a sequence of strings.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.StringValueMax -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.StringValueMin -->
|
|
<class name="StringValueMin" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="StringValueMin"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[the default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
a string.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getVal" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of the aggregated value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of one element. The element is a string
|
|
representation of the aggregated value. The return value is
|
|
expected to be used by a combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that maintains the smallest of
|
|
a sequence of strings.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.StringValueMin -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.UniqValueCount -->
|
|
<class name="UniqValueCount" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="UniqValueCount"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[the default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="UniqValueCount" type="long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[constructor
|
|
@param maxNum the limit on the number of unique values to keep.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setMaxItems" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="n" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the limit on the number of unique values
|
|
@param n the desired limit on the number of unique values
|
|
@return the new limit on the number of unique values]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val
|
|
an object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the number of unique objects aggregated]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUniqueItems" return="java.util.Set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the set of the unique objects]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of the unique objects. The return value is
|
|
expected to be used by a combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that dedupes a sequence of objects.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.UniqValueCount -->
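 <!-- Illustrative stand-alone use of UniqValueCount (not generated by JDiff):

        UniqValueCount uniq = new UniqValueCount();
        uniq.setMaxItems(1000);          // cap the number of unique values kept
        uniq.addNextValue("a");
        uniq.addNextValue("b");
        uniq.addNextValue("a");
        String count = uniq.getReport(); // number of unique values seen, here 2 -->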
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor -->
|
|
<class name="UserDefinedValueAggregatorDescriptor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor"/>
|
|
<constructor name="UserDefinedValueAggregatorDescriptor" type="java.lang.String, org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@param className the class name of the user defined descriptor class
|
|
@param job a configuration object used for descriptor configuration]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="createInstance" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="className" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Create an instance of the given class
|
|
@param className the name of the class
|
|
@return a dynamically created instance of the given class]]>
|
|
</doc>
|
|
</method>
|
|
<method name="generateKeyValPairs" return="java.util.ArrayList<java.util.Map.Entry<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Generate a list of aggregation-id/value pairs for the given key/value pairs
|
|
by delegating the invocation to the real object.
|
|
|
|
@param key
|
|
input key
|
|
@param val
|
|
input value
|
|
@return a list of aggregation id/value pairs. An aggregation id encodes an
|
|
aggregation type which is used to guide the way to aggregate the
|
|
value in the reduce/combiner phase of an Aggregate based job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of this object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Do nothing.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a wrapper for a user defined value aggregator descriptor.
|
|
It serves two functions: one is to create an object of ValueAggregatorDescriptor from the
|
|
name of a user defined class that may be dynamically loaded. The other is to
|
|
delegate invocations of the generateKeyValPairs function to the created object.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor -->
|
|
<!-- start interface org.apache.hadoop.mapred.lib.aggregate.ValueAggregator -->
|
|
<interface name="ValueAggregator" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add a value to the aggregator
|
|
|
|
@param val the value to be added]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return an array of values as the outputs of the combiner.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This interface defines the minimal protocol for value aggregators.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.lib.aggregate.ValueAggregator -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor -->
|
|
<class name="ValueAggregatorBaseDescriptor" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor"/>
|
|
<constructor name="ValueAggregatorBaseDescriptor"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="generateEntry" return="java.util.Map.Entry<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="type" type="java.lang.String"/>
|
|
<param name="id" type="java.lang.String"/>
|
|
<param name="val" type="org.apache.hadoop.io.Text"/>
|
|
<doc>
|
|
<![CDATA[@param type the aggregation type
|
|
@param id the aggregation id
|
|
@param val the val associated with the id to be aggregated
|
|
@return an Entry whose key is the aggregation id prefixed with
|
|
the aggregation type.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="generateValueAggregator" return="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="type" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[@param type the aggregation type
|
|
@return a value aggregator of the given type.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="generateKeyValPairs" return="java.util.ArrayList<java.util.Map.Entry<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Generate 1 or 2 aggregation-id/value pairs for the given key/value pair.
|
|
The first id will be of type LONG_VALUE_SUM, with "record_count" as
|
|
its aggregation id. If the input is a file split,
|
|
the second id of the same type will be generated too, with the file name
|
|
as its aggregation id. This achieves the behavior of counting the total number
|
|
of records in the input data, and the number of records in each input file.
|
|
|
|
@param key
|
|
input key
|
|
@param val
|
|
input value
|
|
@return a list of aggregation id/value pairs. An aggregation id encodes an
|
|
aggregation type which is used to guide the way to aggregate the
|
|
value in the reduce/combiner phase of an Aggregate based job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the input file name from the given job configuration.
|
|
|
|
@param job a job configuration object]]>
|
|
</doc>
|
|
</method>
|
|
<field name="UNIQ_VALUE_COUNT" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LONG_VALUE_SUM" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DOUBLE_VALUE_SUM" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="VALUE_HISTOGRAM" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LONG_VALUE_MAX" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LONG_VALUE_MIN" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="STRING_VALUE_MAX" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="STRING_VALUE_MIN" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="inputFile" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This class implements the common functionalities of
|
|
the subclasses of the ValueAggregatorDescriptor interface.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner -->
|
|
<class name="ValueAggregatorCombiner" extends="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase<K1, V1>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ValueAggregatorCombiner"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[The combiner does not need to be configured.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Text"/>
|
|
<param name="values" type="java.util.Iterator<org.apache.hadoop.io.Text>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Combines values for a given key.
|
|
@param key the key is expected to be a Text object, whose prefix indicates
|
|
the type of aggregation to aggregate the values.
|
|
@param values the values to combine
|
|
@param output to collect combined values]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Do nothing.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="arg0" type="K1 extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="arg1" type="V1 extends org.apache.hadoop.io.Writable"/>
|
|
<param name="arg2" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="arg3" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Do nothing. Should not be called.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements the generic combiner of Aggregate.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner -->
|
|
<!-- start interface org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor -->
|
|
<interface name="ValueAggregatorDescriptor" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="generateKeyValPairs" return="java.util.ArrayList<java.util.Map.Entry<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="java.lang.Object"/>
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Generate a list of aggregation-id/value pairs for the given key/value pair.
|
|
This function is usually called by the mapper of an Aggregate based job.
|
|
|
|
@param key
|
|
input key
|
|
@param val
|
|
input value
|
|
@return a list of aggregation id/value pairs. An aggregation id encodes an
|
|
aggregation type which is used to guide the way to aggregate the
|
|
value in the reduce/combiner phase of an Aggregate based job.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Configure the object
|
|
|
|
@param job
|
|
a JobConf object that may contain the information that can be used
|
|
to configure the object.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="TYPE_SEPARATOR" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="ONE" type="org.apache.hadoop.io.Text"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This interface defines the contract a value aggregator descriptor must
|
|
support. Such a descriptor can be configured with a JobConf object. Its main
|
|
function is to generate a list of aggregation-id/value pairs. An aggregation
|
|
id encodes an aggregation type which is used to guide the way to aggregate
|
|
the value in the reduce/combiner phase of an Aggregate based job. The mapper in
|
|
an Aggregate based map/reduce job may create one or more of
|
|
ValueAggregatorDescriptor objects at configuration time. For each input
|
|
key/value pair, the mapper will use those objects to create aggregation
|
|
id/value pairs.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob -->
|
|
<class name="ValueAggregatorJob" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ValueAggregatorJob"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="createValueAggregatorJobs" return="org.apache.hadoop.mapred.jobcontrol.JobControl"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<param name="descriptors" type="java.lang.Class[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createValueAggregatorJobs" return="org.apache.hadoop.mapred.jobcontrol.JobControl"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createValueAggregatorJob" return="org.apache.hadoop.mapred.JobConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create an Aggregate based map/reduce job.
|
|
|
|
@param args the arguments used for job creation. Generic hadoop
|
|
arguments are accepted.
|
|
@return a JobConf object ready for submission.
|
|
|
|
@throws IOException
|
|
@see GenericOptionsParser]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createValueAggregatorJob" return="org.apache.hadoop.mapred.JobConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<param name="descriptors" type="java.lang.Class[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="setAggregatorDescriptors"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="descriptors" type="java.lang.Class[]"/>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[create and run an Aggregate based map/reduce job.
|
|
|
|
@param args the arguments used for job creation
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This is the main class for creating a map/reduce job using Aggregate
|
|
framework. The Aggregate is a specialization of map/reduce framework,
|
|
specializing in performing various simple aggregations.
|
|
|
|
Generally speaking, in order to implement an application using Map/Reduce
|
|
model, the developer is to implement Map and Reduce functions (and possibly
|
|
combine function). However, a lot of applications related to counting and
|
|
statistics computing have very similar characteristics. Aggregate abstracts
|
|
out the general patterns of these functions and implements those patterns.
|
|
In particular, the package provides generic mapper/reducer/combiner classes,
|
|
and a set of built-in value aggregators, and a generic utility class that
|
|
helps users create map/reduce jobs using these generic classes. The built-in
|
|
aggregators include:
|
|
|
|
sum over numeric values; count of the number of distinct values; the
|
|
histogram of values; and the minimum, maximum, median, average, and standard
|
|
deviation of numeric values
|
|
|
|
The developer using Aggregate will need only to provide a plugin class
|
|
conforming to the following interface:
|
|
|
|
public interface ValueAggregatorDescriptor { public ArrayList<Entry>
|
|
generateKeyValPairs(Object key, Object value); public void
|
|
configure(JobConf job); }
|
|
|
|
The package also provides a base class, ValueAggregatorBaseDescriptor,
|
|
implementing the above interface. The user can extend the base class and
|
|
implement generateKeyValPairs accordingly.
|
|
|
|
The primary work of generateKeyValPairs is to emit one or more key/value
|
|
pairs based on the input key/value pair. The key in an output key/value pair
|
|
encodes two pieces of information: aggregation type and aggregation id. The
|
|
value will be aggregated onto the aggregation id according to the aggregation
|
|
type.
|
|
|
|
This class offers a function to generate a map/reduce job using Aggregate
|
|
framework. The function takes the following parameters: the input directory spec,
|
|
the input format (text or sequence file), the output directory, and a file specifying the
|
|
user plugin class]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob -->
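<!-- Illustrative note (not produced by JDiff): a hedged sketch of the plugin class the
ValueAggregatorJob documentation above asks the developer to provide. The class name
WordCountDescriptor is hypothetical; generateEntry, LONG_VALUE_SUM and ONE come from
ValueAggregatorBaseDescriptor/ValueAggregatorDescriptor as documented in this file.

import java.util.ArrayList;
import java.util.Map.Entry;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor;

public class WordCountDescriptor extends ValueAggregatorBaseDescriptor {
  // For each input record, emit one LONG_VALUE_SUM entry per word so that the
  // framework sums the emitted ONE values and produces a per-word count.
  public ArrayList<Entry<Text, Text>> generateKeyValPairs(Object key, Object val) {
    ArrayList<Entry<Text, Text>> ret = new ArrayList<Entry<Text, Text>>();
    for (String word : val.toString().split("\\s+")) {
      ret.add(generateEntry(LONG_VALUE_SUM, word, ONE));
    }
    return ret;
  }
}

The job itself would then typically be created with ValueAggregatorJob.createValueAggregatorJob
and the descriptor class registered via setAggregatorDescriptors.
-->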
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase -->
|
|
<class name="ValueAggregatorJobBase" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K1, V1, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<implements name="org.apache.hadoop.mapred.Reducer<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<constructor name="ValueAggregatorJobBase"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="logSpec"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<field name="aggregatorDescriptorList" type="java.util.ArrayList<org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor>"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This abstract class implements some common functionalities of the
|
|
generic mapper, reducer and combiner classes of Aggregate.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper -->
|
|
<class name="ValueAggregatorMapper" extends="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase<K1, V1>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ValueAggregatorMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K1 extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="V1 extends org.apache.hadoop.io.Writable"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[the map function. It iterates through the value aggregator descriptor
|
|
list to generate aggregation id/value pairs and emit them.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="arg0" type="org.apache.hadoop.io.Text"/>
|
|
<param name="arg1" type="java.util.Iterator<org.apache.hadoop.io.Text>"/>
|
|
<param name="arg2" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="arg3" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Do nothing. Should not be called.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements the generic mapper of Aggregate.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer -->
|
|
<class name="ValueAggregatorReducer" extends="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase<K1, V1>"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ValueAggregatorReducer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="reduce"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.Text"/>
|
|
<param name="values" type="java.util.Iterator<org.apache.hadoop.io.Text>"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@param key
|
|
the key is expected to be a Text object, whose prefix indicates
|
|
the type of aggregation to aggregate the values. In effect, data
|
|
driven computing is achieved. It is assumed that each aggregator's
|
|
getReport method emits appropriate output for the aggregator. This
|
|
may be further customized.
|
|
@param values the values to be aggregated]]>
|
|
</doc>
|
|
</method>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="arg0" type="K1 extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="arg1" type="V1 extends org.apache.hadoop.io.Writable"/>
|
|
<param name="arg2" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>"/>
|
|
<param name="arg3" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Do nothing. Should not be called.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements the generic reducer of Aggregate.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.aggregate.ValueHistogram -->
|
|
<class name="ValueHistogram" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregator"/>
|
|
<constructor name="ValueHistogram"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="addNextValue"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[add the given val to the aggregator.
|
|
|
|
@param val the value to be added. It is expected to be a string
|
|
in the form of xxxx\tnum, meaning xxxx has num occurrences.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReport" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the string representation of this aggregator.
|
|
It includes the following basic statistics of the histogram:
|
|
the number of unique values
|
|
the minimum value
|
|
the median value
|
|
the maximum value
|
|
the average value
|
|
the standard deviation]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReportDetails" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return a string representation of the list of value/frequency pairs of
|
|
the histogram]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCombinerOutput" return="java.util.ArrayList"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return a list of value/frequency pairs.
|
|
The return value is expected to be used by the reducer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getReportItems" return="java.util.TreeMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return a TreeMap representation of the histogram]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[reset the aggregator]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements a value aggregator that computes the
|
|
histogram of a sequence of strings.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.aggregate.ValueHistogram -->
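<!-- Illustrative note (not produced by JDiff): a small usage sketch for ValueHistogram,
using the value format documented for addNextValue ("xxxx\tnum"). The demo class name
and item values are hypothetical.

import org.apache.hadoop.mapred.lib.aggregate.ValueHistogram;

public class ValueHistogramDemo {
  public static void main(String[] args) {
    ValueHistogram hist = new ValueHistogram();
    hist.addNextValue("apple\t3");   // the item "apple" occurred 3 times
    hist.addNextValue("banana\t1");  // the item "banana" occurred once
    System.out.println(hist.getReport());        // summary statistics of the histogram
    System.out.println(hist.getReportDetails()); // per-item value/frequency pairs
  }
}
-->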
|
|
</package>
|
|
<package name="org.apache.hadoop.mapred.lib.db">
|
|
<!-- start class org.apache.hadoop.mapred.lib.db.DBConfiguration -->
|
|
<class name="DBConfiguration" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="configureDB"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="driverClass" type="java.lang.String"/>
|
|
<param name="dbUrl" type="java.lang.String"/>
|
|
<param name="userName" type="java.lang.String"/>
|
|
<param name="passwd" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets the DB access related fields in the JobConf.
|
|
@param job the job
|
|
@param driverClass JDBC Driver class name
|
|
@param dbUrl JDBC DB access URL.
|
|
@param userName DB access username
|
|
@param passwd DB access passwd]]>
|
|
</doc>
|
|
</method>
|
|
<method name="configureDB"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="driverClass" type="java.lang.String"/>
|
|
<param name="dbUrl" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets the DB access related fields in the JobConf.
|
|
@param job the job
|
|
@param driverClass JDBC Driver class name
|
|
@param dbUrl JDBC DB access URL.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="DRIVER_CLASS_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The JDBC Driver class name]]>
|
|
</doc>
|
|
</field>
|
|
<field name="URL_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[JDBC Database access URL]]>
|
|
</doc>
|
|
</field>
|
|
<field name="USERNAME_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[User name to access the database]]>
|
|
</doc>
|
|
</field>
|
|
<field name="PASSWORD_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Password to access the database]]>
|
|
</doc>
|
|
</field>
|
|
<field name="INPUT_TABLE_NAME_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Input table name]]>
|
|
</doc>
|
|
</field>
|
|
<field name="INPUT_FIELD_NAMES_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Field names in the Input table]]>
|
|
</doc>
|
|
</field>
|
|
<field name="INPUT_CONDITIONS_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[WHERE clause in the input SELECT statement]]>
|
|
</doc>
|
|
</field>
|
|
<field name="INPUT_ORDER_BY_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[ORDER BY clause in the input SELECT statement]]>
|
|
</doc>
|
|
</field>
|
|
<field name="INPUT_QUERY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Whole input query, excluding LIMIT...OFFSET]]>
|
|
</doc>
|
|
</field>
|
|
<field name="INPUT_COUNT_QUERY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Input query to get the count of records]]>
|
|
</doc>
|
|
</field>
|
|
<field name="INPUT_CLASS_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Class name implementing DBWritable which will hold input tuples]]>
|
|
</doc>
|
|
</field>
|
|
<field name="OUTPUT_TABLE_NAME_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Output table name]]>
|
|
</doc>
|
|
</field>
|
|
<field name="OUTPUT_FIELD_NAMES_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Field names in the Output table]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A container for configuration property names for jobs with DB input/output.
|
|
<br>
|
|
The job can be configured using the static methods in this class,
|
|
{@link DBInputFormat}, and {@link DBOutputFormat}.
|
|
<p>
|
|
Alternatively, the properties can be set in the configuration with proper
|
|
values.
|
|
|
|
@see DBConfiguration#configureDB(JobConf, String, String, String, String)
|
|
@see DBInputFormat#setInput(JobConf, Class, String, String)
|
|
@see DBInputFormat#setInput(JobConf, Class, String, String, String, String...)
|
|
@see DBOutputFormat#setOutput(JobConf, String, String...)]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.db.DBConfiguration -->
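<!-- Illustrative note (not produced by JDiff): a hedged sketch of setting DB access
properties with DBConfiguration.configureDB as documented above. The driver class, URL,
credentials and the MyJob driver class are placeholders.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;

JobConf job = new JobConf(MyJob.class);  // MyJob is a hypothetical job driver class
// Store the JDBC driver, database URL and credentials in the job configuration.
DBConfiguration.configureDB(job,
    "com.mysql.jdbc.Driver",
    "jdbc:mysql://dbhost:3306/mydb",
    "dbuser",
    "dbpassword");
-->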
|
|
<!-- start class org.apache.hadoop.mapred.lib.db.DBInputFormat -->
|
|
<class name="DBInputFormat" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputFormat<org.apache.hadoop.io.LongWritable, T>"/>
|
|
<implements name="org.apache.hadoop.mapred.JobConfigurable"/>
|
|
<constructor name="DBInputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecordReader" return="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.LongWritable, T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="split" type="org.apache.hadoop.mapred.InputSplit"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSplits" return="org.apache.hadoop.mapred.InputSplit[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="chunks" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCountQuery" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the query for getting the total number of rows;
|
|
subclasses can override this for custom behaviour.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="inputClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.lib.db.DBWritable>"/>
|
|
<param name="tableName" type="java.lang.String"/>
|
|
<param name="conditions" type="java.lang.String"/>
|
|
<param name="orderBy" type="java.lang.String"/>
|
|
<param name="fieldNames" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Initializes the map-part of the job with the appropriate input settings.
|
|
|
|
@param job The job
|
|
@param inputClass the class object implementing DBWritable, which is the
|
|
Java object holding tuple fields.
|
|
@param tableName The table to read data from
|
|
@param conditions The conditions with which to select data, e.g. '(updated >
|
|
20070101 AND length > 0)'
|
|
@param orderBy the fieldNames in the orderBy clause.
|
|
@param fieldNames The field names in the table
|
|
@see #setInput(JobConf, Class, String, String)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="inputClass" type="java.lang.Class<? extends org.apache.hadoop.mapred.lib.db.DBWritable>"/>
|
|
<param name="inputQuery" type="java.lang.String"/>
|
|
<param name="inputCountQuery" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Initializes the map-part of the job with the appropriate input settings.
|
|
|
|
@param job The job
|
|
@param inputClass the class object implementing DBWritable, which is the
|
|
Java object holding tuple fields.
|
|
@param inputQuery the input query to select fields. Example :
|
|
"SELECT f1, f2, f3 FROM Mytable ORDER BY f1"
|
|
@param inputCountQuery the input query that returns the number of records in
|
|
the table.
|
|
Example : "SELECT COUNT(f1) FROM Mytable"
|
|
@see #setInput(JobConf, Class, String, String, String, String...)]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An InputFormat that reads input data from an SQL table.
|
|
<p>
|
|
DBInputFormat emits LongWritables containing the record number as
|
|
key and DBWritables as value.
|
|
|
|
The SQL query and input class can be specified using one of the two
|
|
setInput methods.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.db.DBInputFormat -->
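<!-- Illustrative note (not produced by JDiff): a hedged sketch of wiring DBInputFormat
into a job with the table-based setInput overload documented above. MyRecord is a
hypothetical class implementing DBWritable; the table, conditions and field names are
placeholders.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;

JobConf job = new JobConf();
// DB connection details would normally be set first via DBConfiguration.configureDB.
job.setInputFormat(DBInputFormat.class);
// Read rows from "employees" where salary > 0, ordered by name.
DBInputFormat.setInput(job, MyRecord.class, "employees",
    "salary > 0",              // conditions (WHERE clause)
    "name",                    // orderBy
    "id", "name", "salary");   // field names in the table
-->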
|
|
<!-- start class org.apache.hadoop.mapred.lib.db.DBInputFormat.DBInputSplit -->
|
|
<class name="DBInputFormat.DBInputSplit" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.InputSplit"/>
|
|
<constructor name="DBInputFormat.DBInputSplit"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default Constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="DBInputFormat.DBInputSplit" type="long, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Convenience Constructor
|
|
@param start the index of the first row to select
|
|
@param end the index of the last row to select]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getLocations" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStart" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return The index of the first row to select]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getEnd" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return The index of the last row to select]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLength" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[@return The total row count in this split]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="input" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="output" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An InputSplit that spans a set of rows.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.db.DBInputFormat.DBInputSplit -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.db.DBInputFormat.DBRecordReader -->
|
|
<class name="DBInputFormat.DBRecordReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.LongWritable, T>"/>
|
|
<constructor name="DBInputFormat.DBRecordReader" type="org.apache.hadoop.mapred.lib.db.DBInputFormat.DBInputSplit, java.lang.Class<T>, org.apache.hadoop.mapred.JobConf"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="SQLException" type="java.sql.SQLException"/>
|
|
<doc>
|
|
<![CDATA[@param split The InputSplit to read data for
|
|
@throws SQLException]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getSelectQuery" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the query for selecting the records;
|
|
subclasses can override this for custom behaviour.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createKey" return="org.apache.hadoop.io.LongWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createValue" return="T extends org.apache.hadoop.mapred.lib.db.DBWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPos" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProgress" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="next" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="org.apache.hadoop.io.LongWritable"/>
|
|
<param name="value" type="T extends org.apache.hadoop.mapred.lib.db.DBWritable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A RecordReader that reads records from an SQL table.
|
|
Emits LongWritables containing the record number as
|
|
key and DBWritables as value.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.db.DBInputFormat.DBRecordReader -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable -->
|
|
<class name="DBInputFormat.NullDBWritable" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.lib.db.DBWritable"/>
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="DBInputFormat.NullDBWritable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="arg0" type="java.sql.ResultSet"/>
|
|
<exception name="SQLException" type="java.sql.SQLException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="arg0" type="java.sql.PreparedStatement"/>
|
|
<exception name="SQLException" type="java.sql.SQLException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A Class that does nothing, implementing DBWritable]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable -->
|
|
<!-- start class org.apache.hadoop.mapred.lib.db.DBOutputFormat -->
|
|
<class name="DBOutputFormat" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.OutputFormat<K, V>"/>
|
|
<constructor name="DBOutputFormat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="constructQuery" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="table" type="java.lang.String"/>
|
|
<param name="fieldNames" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Constructs the query used as the prepared statement to insert data.
|
|
|
|
@param table
|
|
the table to insert into
|
|
@param fieldNames
|
|
the fields to insert into. If field names are unknown, supply an
|
|
array of nulls.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checkOutputSpecs"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="filesystem" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getRecordWriter" return="org.apache.hadoop.mapred.RecordWriter<K, V>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="filesystem" type="org.apache.hadoop.fs.FileSystem"/>
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="progress" type="org.apache.hadoop.util.Progressable"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="tableName" type="java.lang.String"/>
|
|
<param name="fieldNames" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Initializes the reduce-part of the job with the appropriate output settings
|
|
|
|
@param job
|
|
The job
|
|
@param tableName
|
|
The table to insert data into
|
|
@param fieldNames
|
|
The field names in the table. If unknown, supply the appropriate
|
|
number of nulls.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An OutputFormat that sends the reduce output to an SQL table.
|
|
<p>
|
|
{@link DBOutputFormat} accepts <key,value> pairs, where
|
|
key has a type extending DBWritable. Returned {@link RecordWriter}
|
|
writes <b>only the key</b> to the database with a batch SQL query.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.db.DBOutputFormat -->
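<!-- Illustrative note (not produced by JDiff): a hedged sketch of using
DBOutputFormat.setOutput as documented above; the table and column names are placeholders.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBOutputFormat;

JobConf job = new JobConf();
job.setOutputFormat(DBOutputFormat.class);
// Reduce output keys (which must implement DBWritable) are written to the
// "daily_totals" table with a batch SQL query; the values of the pairs are ignored.
DBOutputFormat.setOutput(job, "daily_totals", "day", "total");
-->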
|
|
<!-- start class org.apache.hadoop.mapred.lib.db.DBOutputFormat.DBRecordWriter -->
|
|
<class name="DBOutputFormat.DBRecordWriter" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.RecordWriter<K, V>"/>
|
|
<constructor name="DBOutputFormat.DBRecordWriter" type="java.sql.Connection, java.sql.PreparedStatement"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="SQLException" type="java.sql.SQLException"/>
|
|
</constructor>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.mapred.lib.db.DBWritable"/>
|
|
<param name="value" type="V"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A RecordWriter that writes the reduce output to a SQL table]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.lib.db.DBOutputFormat.DBRecordWriter -->
|
|
<!-- start interface org.apache.hadoop.mapred.lib.db.DBWritable -->
|
|
<interface name="DBWritable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="statement" type="java.sql.PreparedStatement"/>
|
|
<exception name="SQLException" type="java.sql.SQLException"/>
|
|
<doc>
|
|
<![CDATA[Sets the fields of the object in the {@link PreparedStatement}.
|
|
@param statement the statement that the fields are put into.
|
|
@throws SQLException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="resultSet" type="java.sql.ResultSet"/>
|
|
<exception name="SQLException" type="java.sql.SQLException"/>
|
|
<doc>
|
|
<![CDATA[Reads the fields of the object from the {@link ResultSet}.
|
|
@param resultSet the {@link ResultSet} to get the fields from.
|
|
@throws SQLException]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Objects that are read from/written to a database should implement
|
|
<code>DBWritable</code>. DBWritable is similar to {@link Writable}
|
|
except that the {@link #write(PreparedStatement)} method takes a
|
|
{@link PreparedStatement}, and {@link #readFields(ResultSet)}
|
|
takes a {@link ResultSet}.
|
|
<p>
|
|
Implementations are responsible for writing the fields of the object
|
|
to PreparedStatement, and reading the fields of the object from the
|
|
ResultSet.
|
|
|
|
<p>Example:</p>
|
|
If we have the following table in the database :
|
|
<pre>
|
|
CREATE TABLE MyTable (
|
|
counter INTEGER NOT NULL,
|
|
timestamp BIGINT NOT NULL
|
|
);
|
|
</pre>
|
|
then we can read/write the tuples from/to the table with :
|
|
<p><pre>
|
|
public class MyWritable implements Writable, DBWritable {
|
|
// Some data
|
|
private int counter;
|
|
private long timestamp;
|
|
|
|
//Writable#write() implementation
|
|
public void write(DataOutput out) throws IOException {
|
|
out.writeInt(counter);
|
|
out.writeLong(timestamp);
|
|
}
|
|
|
|
//Writable#readFields() implementation
|
|
public void readFields(DataInput in) throws IOException {
|
|
counter = in.readInt();
|
|
timestamp = in.readLong();
|
|
}
|
|
|
|
public void write(PreparedStatement statement) throws SQLException {
|
|
statement.setInt(1, counter);
|
|
statement.setLong(2, timestamp);
|
|
}
|
|
|
|
public void readFields(ResultSet resultSet) throws SQLException {
|
|
counter = resultSet.getInt(1);
|
|
timestamp = resultSet.getLong(2);
|
|
}
|
|
}
|
|
</pre></p>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.mapred.lib.db.DBWritable -->
|
|
</package>
|
|
<package name="org.apache.hadoop.mapred.pipes">
|
|
<!-- start class org.apache.hadoop.mapred.pipes.Submitter -->
|
|
<class name="Submitter" extends="org.apache.hadoop.conf.Configured"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="Submitter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Submitter" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getExecutable" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Get the URI of the application's executable.
|
|
@param conf
|
|
@return the URI where the application's executable is located]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setExecutable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="executable" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the URI for the application's executable. Normally this is an hdfs:
|
|
location.
|
|
@param conf
|
|
@param executable The URI of the application's executable.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setIsJavaRecordReader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="value" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether the job is using a Java RecordReader.
|
|
@param conf the configuration to modify
|
|
@param value the new value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getIsJavaRecordReader" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Check whether the job is using a Java RecordReader
|
|
@param conf the configuration to check
|
|
@return is it a Java RecordReader?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setIsJavaMapper"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="value" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether the Mapper is written in Java.
|
|
@param conf the configuration to modify
|
|
@param value the new value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getIsJavaMapper" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Check whether the job is using a Java Mapper.
|
|
@param conf the configuration to check
|
|
@return is it a Java Mapper?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setIsJavaReducer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="value" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether the Reducer is written in Java.
|
|
@param conf the configuration to modify
|
|
@param value the new value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getIsJavaReducer" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Check whether the job is using a Java Reducer.
|
|
@param conf the configuration to check
|
|
@return is it a Java Reducer?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setIsJavaRecordWriter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="value" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether the job will use a Java RecordWriter.
|
|
@param conf the configuration to modify
|
|
@param value the new value to set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getIsJavaRecordWriter" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Will the reduce use a Java RecordWriter?
|
|
@param conf the configuration to check
|
|
@return true, if the output of the job will be written by Java]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getKeepCommandFile" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<doc>
|
|
<![CDATA[Does the user want to keep the command file for debugging? If this is
|
|
true, pipes will write a copy of the command data to a file in the
|
|
task directory named "downlink.data", which may be used to run the C++
|
|
program under the debugger. You probably also want to set
|
|
JobConf.setKeepFailedTaskFiles(true) to keep the entire directory from
|
|
being deleted.
|
|
To run using the data file, set the environment variable
|
|
"hadoop.pipes.command.file" to point to the file.
|
|
@param conf the configuration to check
|
|
@return will the framework save the command file?]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setKeepCommandFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<param name="keep" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set whether to keep the command file for debugging
|
|
@param conf the configuration to modify
|
|
@param keep the new value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="submitJob" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="Use {@link Submitter#runJob(JobConf)}">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Submit a job to the map/reduce cluster. All of the necessary modifications
|
|
to the job to run under pipes are made to the configuration.
|
|
@param conf the job to submit to the cluster (MODIFIED)
|
|
@throws IOException
|
|
@deprecated Use {@link Submitter#runJob(JobConf)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="runJob" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Submit a job to the map/reduce cluster. All of the necessary modifications
|
|
to the job to run under pipes are made to the configuration.
|
|
@param conf the job to submit to the cluster (MODIFIED)
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="jobSubmit" return="org.apache.hadoop.mapred.RunningJob"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.mapred.JobConf"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Submit a job to the Map-Reduce framework.
|
|
This returns a handle to the {@link RunningJob} which can be used to track
|
|
the running-job.
|
|
|
|
@param conf the job configuration.
|
|
@return a handle to the {@link RunningJob} which can be used to track the
|
|
running-job.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[Submit a pipes job based on the command line arguments.
|
|
@param args]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[The main entry point and job submitter. It may be used either from the
|
|
command line or through the API to launch Pipes jobs.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.mapred.pipes.Submitter -->
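<!-- Illustrative example (not part of the JDiff output): a minimal sketch of
     API-based Pipes job submission using the methods documented above. The
     C++ executable is assumed to have been registered on the JobConf
     beforehand (for example with the setExecutable method of this class,
     which appears earlier in the listing); the PipesWordCount driver class
     and the input/output paths are placeholders.

     JobConf conf = new JobConf(PipesWordCount.class);
     FileInputFormat.setInputPaths(conf, new Path("/user/alice/in"));
     FileOutputFormat.setOutputPath(conf, new Path("/user/alice/out"));
     Submitter.setIsJavaRecordWriter(conf, true);  // Java writes the output
     Submitter.setKeepCommandFile(conf, false);    // discard downlink.data
     RunningJob job = Submitter.runJob(conf);      // submit the modified conf
-->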
|
|
</package>
|
|
<package name="org.apache.hadoop.metrics">
|
|
<!-- start class org.apache.hadoop.metrics.ContextFactory -->
|
|
<class name="ContextFactory" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ContextFactory"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of ContextFactory]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getAttribute" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="attributeName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns the value of the named attribute, or null if there is no
|
|
attribute of that name.
|
|
|
|
@param attributeName the attribute name
|
|
@return the attribute value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAttributeNames" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the names of all the factory's attributes.
|
|
|
|
@return the attribute names]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setAttribute"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="attributeName" type="java.lang.String"/>
|
|
<param name="value" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Sets the named factory attribute to the specified value, creating it
|
|
if it did not already exist. If the value is null, this is the same as
|
|
calling removeAttribute.
|
|
|
|
@param attributeName the attribute name
|
|
@param value the new attribute value]]>
|
|
</doc>
|
|
</method>
|
|
<method name="removeAttribute"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="attributeName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Removes the named attribute if it exists.
|
|
|
|
@param attributeName the attribute name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getContext" return="org.apache.hadoop.metrics.MetricsContext"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="ClassNotFoundException" type="java.lang.ClassNotFoundException"/>
|
|
<exception name="InstantiationException" type="java.lang.InstantiationException"/>
|
|
<exception name="IllegalAccessException" type="java.lang.IllegalAccessException"/>
|
|
<doc>
|
|
<![CDATA[Returns the named MetricsContext instance, constructing it if necessary
|
|
using the factory's current configuration attributes. <p/>
|
|
|
|
When constructing the instance, if the factory property
|
|
<i>contextName</i><code>.class</code> exists,
|
|
its value is taken to be the name of the class to instantiate. Otherwise,
|
|
the default is to create an instance of
|
|
<code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a
|
|
dummy "no-op" context which will cause all metric data to be discarded.
|
|
|
|
@param contextName the name of the context
|
|
@return the named MetricsContext]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNullContext" return="org.apache.hadoop.metrics.MetricsContext"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns a "null" context - one which does nothing.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFactory" return="org.apache.hadoop.metrics.ContextFactory"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns the singleton ContextFactory instance, constructing it if
|
|
necessary. <p/>
|
|
|
|
When the instance is constructed, this method checks if the file
|
|
<code>hadoop-metrics.properties</code> exists on the class path. If it
|
|
exists, it must be in the format defined by java.util.Properties, and all
|
|
the properties in the file are set as attributes on the newly created
|
|
ContextFactory instance.
|
|
|
|
@return the singleton ContextFactory instance]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Factory class for creating MetricsContext objects. To obtain an instance
|
|
of this class, use the static <code>getFactory()</code> method.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.ContextFactory -->
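<!-- Illustrative example (not part of the JDiff output): obtaining the
     singleton factory and a named context as described above. The context
     name "myContextName" is a placeholder, the period override is optional,
     and the checked exceptions declared on getFactory(), getContext() and
     startMonitoring() are left to propagate for brevity.

     ContextFactory factory = ContextFactory.getFactory();
     factory.setAttribute("myContextName.period", "10");
     MetricsContext context = factory.getContext("myContextName");
     context.startMonitoring();
-->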
|
|
<!-- start interface org.apache.hadoop.metrics.MetricsContext -->
|
|
<interface name="MetricsContext" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getContextName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the context name.
|
|
|
|
@return the context name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startMonitoring"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Starts or restarts monitoring, the emitting of metrics records as they are
|
|
updated.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stopMonitoring"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Stops monitoring. This does not free any data that the implementation
|
|
may have buffered for sending at the next timer event. It
|
|
is OK to call <code>startMonitoring()</code> again after calling
|
|
this.
|
|
@see #close()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isMonitoring" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if monitoring is currently in progress.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Stops monitoring and also frees any buffered data, returning this
|
|
object to its initial state.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createRecord" return="org.apache.hadoop.metrics.MetricsRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Creates a new MetricsRecord instance with the given <code>recordName</code>.
|
|
Throws an exception if the metrics implementation is configured with a fixed
|
|
set of record names and <code>recordName</code> is not in that set.
|
|
|
|
@param recordName the name of the record
|
|
@throws MetricsException if recordName conflicts with configuration data]]>
|
|
</doc>
|
|
</method>
|
|
<method name="registerUpdater"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="updater" type="org.apache.hadoop.metrics.Updater"/>
|
|
<doc>
|
|
<![CDATA[Registers a callback to be called at regular time intervals, as
|
|
determined by the implementation-class specific configuration.
|
|
|
|
@param updater object to be run periodically; it should update
|
|
some metrics records and then return]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unregisterUpdater"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="updater" type="org.apache.hadoop.metrics.Updater"/>
|
|
<doc>
|
|
<![CDATA[Removes a callback, if it exists.
|
|
|
|
@param updater object to be removed from the callback list]]>
|
|
</doc>
|
|
</method>
|
|
<field name="DEFAULT_PERIOD" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default period in seconds at which data is sent to the metrics system.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[The main interface to the metrics package.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.metrics.MetricsContext -->
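<!-- Illustrative example (not part of the JDiff output): typical use of a
     MetricsContext once it has been obtained, e.g. from the ContextFactory
     shown above. The variables "context" and "myUpdater" (an Updater
     implementation) are assumed to exist; IOException handling is omitted.

     MetricsRecord record = context.createRecord("diskStats");
     context.registerUpdater(myUpdater);   // called back on each period
     context.startMonitoring();            // begin periodic emission
-->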
|
|
<!-- start class org.apache.hadoop.metrics.MetricsException -->
|
|
<class name="MetricsException" extends="java.lang.RuntimeException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MetricsException"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of MetricsException]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="MetricsException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of MetricsException
|
|
|
|
@param message an error message]]>
|
|
</doc>
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[General-purpose, unchecked metrics exception.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.MetricsException -->
|
|
<!-- start interface org.apache.hadoop.metrics.MetricsRecord -->
|
|
<interface name="MetricsRecord" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getRecordName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the record name.
|
|
|
|
@return the record name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value. The tagValue may be null,
|
|
which is treated the same as an empty String.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="short"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="removeTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Removes any tag of the specified name.
|
|
|
|
@param tagName name of a tag]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="short"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="float"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="short"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="float"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="update"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Updates the table of buffered data which is to be sent periodically.
|
|
If the tag values match an existing row, that row is updated;
|
|
otherwise, a new row is added.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Removes, from the buffered data table, all rows having tags
|
|
that equal the tags that have been set on this record. For example,
|
|
if there are no tags on this record, all rows for this record name
|
|
would be removed. Or, if there is a single tag on this record, then
|
|
just rows containing a tag with the same name and value would be removed.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A named and optionally tagged set of records to be sent to the metrics
|
|
system. <p/>
|
|
|
|
A record name identifies the kind of data to be reported. For example, a
|
|
program reporting statistics relating to the disks on a computer might use
|
|
a record name "diskStats".<p/>
|
|
|
|
A record has zero or more <i>tags</i>. A tag has a name and a value. To
|
|
continue the example, the "diskStats" record might use a tag named
|
|
"diskName" to identify a particular disk. Sometimes it is useful to have
|
|
more than one tag, so there might also be a "diskType" with value "ide" or
|
|
"scsi" or whatever.<p/>
|
|
|
|
A record also has zero or more <i>metrics</i>. These are the named
|
|
values that are to be reported to the metrics system. In the "diskStats"
|
|
example, possible metric names would be "diskPercentFull", "diskPercentBusy",
|
|
"kbReadPerSecond", etc.<p/>
|
|
|
|
The general procedure for using a MetricsRecord is to fill in its tag and
|
|
metric values, and then call <code>update()</code> to pass the record to the
|
|
client library.
|
|
Metric data is not immediately sent to the metrics system
|
|
each time that <code>update()</code> is called.
|
|
An internal table is maintained, identified by the record name. This
|
|
table has columns
|
|
corresponding to the tag and the metric names, and rows
|
|
corresponding to each unique set of tag values. An update
|
|
either modifies an existing row in the table, or adds a new row with a set of
|
|
tag values that are different from all the other rows. Note that if there
|
|
are no tags, then there can be at most one row in the table. <p/>
|
|
|
|
Once a row is added to the table, its data will be sent to the metrics system
|
|
on every timer period, whether or not it has been updated since the previous
|
|
timer period. If this is inappropriate, for example if metrics were being
|
|
reported by some transient object in an application, the <code>remove()</code>
|
|
method can be used to remove the row and thus stop the data from being
|
|
sent.<p/>
|
|
|
|
Note that the <code>update()</code> method is atomic. This means that it is
|
|
safe for different threads to be updating the same metric. More precisely,
|
|
it is OK for different threads to call <code>update()</code> on MetricsRecord instances
|
|
with the same set of tag names and tag values. Different threads should
|
|
<b>not</b> use the same MetricsRecord instance at the same time.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.metrics.MetricsRecord -->
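<!-- Illustrative example (not part of the JDiff output): the general
     procedure described above, using the "diskStats" record from the text.
     The variable "context" is an assumed MetricsContext; tag and metric
     values are placeholders.

     MetricsRecord diskStats = context.createRecord("diskStats");
     diskStats.setTag("diskName", "sda1");
     diskStats.setMetric("diskPercentFull", 42);
     diskStats.incrMetric("kbReadPerSecond", 128);
     diskStats.update();   // buffers the row; sent on the next timer period
-->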
|
|
<!-- start class org.apache.hadoop.metrics.MetricsUtil -->
|
|
<class name="MetricsUtil" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getContext" return="org.apache.hadoop.metrics.MetricsContext"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Utility method to return the named context.
|
|
If the desired context cannot be created for any reason, the exception
|
|
is logged, and a null context is returned.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createRecord" return="org.apache.hadoop.metrics.MetricsRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.metrics.MetricsContext"/>
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Utility method to create and return new metrics record instance within the
|
|
given context. This record is tagged with the host name.
|
|
|
|
@param context the context
|
|
@param recordName name of the record
|
|
@return newly created metrics record]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Utility class to simplify creation and reporting of hadoop metrics.
|
|
|
|
For examples of usage, see NameNodeMetrics.
|
|
@see org.apache.hadoop.metrics.MetricsRecord
|
|
@see org.apache.hadoop.metrics.MetricsContext
|
|
@see org.apache.hadoop.metrics.ContextFactory]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.MetricsUtil -->
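<!-- Illustrative example (not part of the JDiff output): the two utility
     methods above used together. The context, record and metric names are
     placeholders; the returned record is already tagged with the host name.

     MetricsContext context = MetricsUtil.getContext("dfs");
     MetricsRecord record = MetricsUtil.createRecord(context, "namenode");
     record.setMetric("filesCreated", 1);
     record.update();
-->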
|
|
<!-- start interface org.apache.hadoop.metrics.Updater -->
|
|
<interface name="Updater" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="doUpdates"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.metrics.MetricsContext"/>
|
|
<doc>
|
|
<![CDATA[Timer-based call-back from the metrics library.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Call-back interface. See <code>MetricsContext.registerUpdater()</code>.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.metrics.Updater -->
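<!-- Illustrative example (not part of the JDiff output): a minimal Updater
     implementation registered with a context. The variable "context" is an
     assumed MetricsContext; record and metric names are placeholders.

     import org.apache.hadoop.metrics.*;

     class MyMetrics implements Updater {
       private int requests;
       public void doUpdates(MetricsContext context) {
         MetricsRecord record = context.createRecord("myRecord");
         record.setMetric("requests", requests);
         record.update();
       }
     }

     context.registerUpdater(new MyMetrics());
-->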
|
|
</package>
|
|
<package name="org.apache.hadoop.metrics.file">
|
|
<!-- start class org.apache.hadoop.metrics.file.FileContext -->
|
|
<class name="FileContext" extends="org.apache.hadoop.metrics.spi.AbstractMetricsContext"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="FileContext"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of FileContext]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="init"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="factory" type="org.apache.hadoop.metrics.ContextFactory"/>
|
|
</method>
|
|
<method name="getFileName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the configured file name, or null.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startMonitoring"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Starts or restarts monitoring by opening, in append mode, the
|
|
file specified by the <code>fileName</code> attribute,
|
|
if specified. Otherwise the data will be written to standard
|
|
output.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stopMonitoring"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Stops monitoring, closing the file.
|
|
@see #close()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="emitRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<param name="outRec" type="org.apache.hadoop.metrics.spi.OutputRecord"/>
|
|
<doc>
|
|
<![CDATA[Emits a metrics record to a file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="flush"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Flushes the output writer, forcing updates to disk.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="FILE_NAME_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="PERIOD_PROPERTY" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Metrics context for writing metrics to a file.<p/>
|
|
|
|
This class is configured by setting ContextFactory attributes which in turn
|
|
are usually configured through a properties file. All the attributes are
|
|
prefixed by the contextName. For example, the properties file might contain:
|
|
<pre>
|
|
myContextName.fileName=/tmp/metrics.log
|
|
myContextName.period=5
|
|
</pre>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.file.FileContext -->
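<!-- Illustrative example (not part of the JDiff output): a
     hadoop-metrics.properties fragment wiring a context to FileContext.
     "myContextName" is a placeholder; the .class attribute is the one
     consulted by ContextFactory.getContext(), and fileName/period are the
     attributes documented above.

     myContextName.class=org.apache.hadoop.metrics.file.FileContext
     myContextName.fileName=/tmp/metrics.log
     myContextName.period=5
-->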
|
|
</package>
|
|
<package name="org.apache.hadoop.metrics.ganglia">
|
|
<!-- start class org.apache.hadoop.metrics.ganglia.GangliaContext -->
|
|
<class name="GangliaContext" extends="org.apache.hadoop.metrics.spi.AbstractMetricsContext"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="GangliaContext"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of GangliaContext]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="init"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="factory" type="org.apache.hadoop.metrics.ContextFactory"/>
|
|
</method>
|
|
<method name="emitRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<param name="outRec" type="org.apache.hadoop.metrics.spi.OutputRecord"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Context for sending metrics to Ganglia.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.ganglia.GangliaContext -->
|
|
</package>
|
|
<package name="org.apache.hadoop.metrics.jvm">
|
|
<!-- start class org.apache.hadoop.metrics.jvm.EventCounter -->
|
|
<class name="EventCounter" extends="org.apache.log4j.AppenderSkeleton"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="EventCounter"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getFatal" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getError" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getWarn" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getInfo" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="event" type="org.apache.log4j.spi.LoggingEvent"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="requiresLayout" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A log4j Appender that simply counts logging events in four levels:
|
|
fatal, error, warn and info.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.jvm.EventCounter -->
|
|
<!-- start class org.apache.hadoop.metrics.jvm.JvmMetrics -->
|
|
<class name="JvmMetrics" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.metrics.Updater"/>
|
|
<method name="init" return="org.apache.hadoop.metrics.jvm.JvmMetrics"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="processName" type="java.lang.String"/>
|
|
<param name="sessionId" type="java.lang.String"/>
|
|
</method>
|
|
<method name="doUpdates"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="context" type="org.apache.hadoop.metrics.MetricsContext"/>
|
|
<doc>
|
|
<![CDATA[This will be called periodically (with the period being configuration
|
|
dependent).]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Singleton class which reports Java Virtual Machine metrics to the metrics API.
|
|
Any application can create an instance of this class in order to emit
|
|
Java VM metrics.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.jvm.JvmMetrics -->
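<!-- Illustrative example (not part of the JDiff output): initialising JVM
     metrics from an application, as described above. The process name and
     session id are placeholders.

     JvmMetrics jvmMetrics = JvmMetrics.init("MyDaemon", "session-1");
     // doUpdates() is then invoked periodically by the metrics framework;
     // no further calls are required from the application.
-->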
|
|
</package>
|
|
<package name="org.apache.hadoop.metrics.spi">
|
|
<!-- start class org.apache.hadoop.metrics.spi.AbstractMetricsContext -->
|
|
<class name="AbstractMetricsContext" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.metrics.MetricsContext"/>
|
|
<constructor name="AbstractMetricsContext"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of AbstractMetricsContext]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="init"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="factory" type="org.apache.hadoop.metrics.ContextFactory"/>
|
|
<doc>
|
|
<![CDATA[Initializes the context.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAttribute" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="attributeName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Convenience method for subclasses to access factory attributes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAttributeTable" return="java.util.Map<java.lang.String, java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="tableName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns an attribute-value map derived from the factory attributes
|
|
by finding all factory attributes that begin with
|
|
<i>contextName</i>.<i>tableName</i>. The returned map consists of
|
|
those attributes with the contextName and tableName stripped off.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getContextName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the context name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getContextFactory" return="org.apache.hadoop.metrics.ContextFactory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the factory by which this context was created.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startMonitoring"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Starts or restarts monitoring, the emitting of metrics records.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stopMonitoring"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Stops monitoring. This does not free buffered data.
|
|
@see #close()]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isMonitoring" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns true if monitoring is currently in progress.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Stops monitoring and frees buffered data, returning this
|
|
object to its initial state.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createRecord" return="org.apache.hadoop.metrics.MetricsRecord"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Creates a new MetricsRecord instance with the given <code>recordName</code>.
|
|
Throws an exception if the metrics implementation is configured with a fixed
|
|
set of record names and <code>recordName</code> is not in that set.
|
|
|
|
@param recordName the name of the record
|
|
@throws MetricsException if recordName conflicts with configuration data]]>
|
|
</doc>
|
|
</method>
|
|
<method name="newRecord" return="org.apache.hadoop.metrics.spi.MetricsRecordImpl"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Subclasses should override this if they subclass MetricsRecordImpl.
|
|
@param recordName the name of the record
|
|
@return newly created instance of MetricsRecordImpl or subclass]]>
|
|
</doc>
|
|
</method>
|
|
<method name="registerUpdater"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="updater" type="org.apache.hadoop.metrics.Updater"/>
|
|
<doc>
|
|
<![CDATA[Registers a callback to be called at time intervals determined by
|
|
the configuration.
|
|
|
|
@param updater object to be run periodically; it should update
|
|
some metrics records]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unregisterUpdater"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="updater" type="org.apache.hadoop.metrics.Updater"/>
|
|
<doc>
|
|
<![CDATA[Removes a callback, if it exists.
|
|
|
|
@param updater object to be removed from the callback list]]>
|
|
</doc>
|
|
</method>
|
|
<method name="emitRecord"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<param name="outRec" type="org.apache.hadoop.metrics.spi.OutputRecord"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Sends a record to the metrics system.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="flush"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Called each period after all records have been emitted, this method does nothing.
|
|
Subclasses may override it in order to perform some kind of flush.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="update"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="record" type="org.apache.hadoop.metrics.spi.MetricsRecordImpl"/>
|
|
<doc>
|
|
<![CDATA[Called by MetricsRecordImpl.update(). Creates or updates a row in
|
|
the internal table of metric data.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="record" type="org.apache.hadoop.metrics.spi.MetricsRecordImpl"/>
|
|
<doc>
|
|
<![CDATA[Called by MetricsRecordImpl.remove(). Removes all matching rows in
|
|
the internal table of metric data. A row matches if it has the same
|
|
tag names and values as record, but it may also have additional
|
|
tags.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPeriod" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the timer period.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setPeriod"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="period" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the timer period]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The main class of the Service Provider Interface. This class should be
|
|
extended in order to integrate the Metrics API with a specific metrics
|
|
client library. <p/>
|
|
|
|
This class implements the internal table of metric data, and the timer
|
|
on which data is to be sent to the metrics system. Subclasses must
|
|
override the abstract <code>emitRecord</code> method in order to transmit
|
|
the data. <p/>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.spi.AbstractMetricsContext -->
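<!-- Illustrative example (not part of the JDiff output): a minimal sketch of
     an SPI subclass, overriding the abstract emitRecord method as described
     above. A real implementation would walk the tags and metrics carried by
     the OutputRecord; this sketch only logs the record name.

     import java.io.IOException;
     import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
     import org.apache.hadoop.metrics.spi.OutputRecord;

     public class ConsoleContext extends AbstractMetricsContext {
       public ConsoleContext() {
         super();
       }
       protected void emitRecord(String contextName, String recordName,
                                 OutputRecord outRec) throws IOException {
         System.out.println(contextName + "." + recordName);
       }
     }
-->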
|
|
<!-- start class org.apache.hadoop.metrics.spi.MetricsRecordImpl -->
|
|
<class name="MetricsRecordImpl" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.metrics.MetricsRecord"/>
|
|
<constructor name="MetricsRecordImpl" type="java.lang.String, org.apache.hadoop.metrics.spi.AbstractMetricsContext"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of MetricsRecordImpl]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getRecordName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the record name.
|
|
|
|
@return the record name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="short"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<param name="tagValue" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Sets the named tag to the specified value.
|
|
|
|
@param tagName name of the tag
|
|
@param tagValue new value of the tag
|
|
@throws MetricsException if the tagName conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="removeTag"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tagName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Removes any tag of the specified name.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="short"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="float"/>
|
|
<doc>
|
|
<![CDATA[Sets the named metric to the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue new value of the metric
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="short"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="byte"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="incrMetric"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="metricName" type="java.lang.String"/>
|
|
<param name="metricValue" type="float"/>
|
|
<doc>
|
|
<![CDATA[Increments the named metric by the specified value.
|
|
|
|
@param metricName name of the metric
|
|
@param metricValue incremental value
|
|
@throws MetricsException if the metricName or the type of the metricValue
|
|
conflicts with the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="update"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Updates the table of buffered data which is to be sent periodically.
|
|
If the tag values match an existing row, that row is updated;
|
|
otherwise, a new row is added.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Removes the row, if it exists, in the buffered data table having tags
|
|
that equal the tags that have been set on this record.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An implementation of MetricsRecord. Keeps a back-pointer to the context
|
|
from which it was created, and delegates back to it on <code>update</code>
|
|
and <code>remove()</code>.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.spi.MetricsRecordImpl -->
|
|
<!-- start class org.apache.hadoop.metrics.spi.MetricValue -->
|
|
<class name="MetricValue" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MetricValue" type="java.lang.Number, boolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of MetricValue]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="isIncrement" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="isAbsolute" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getNumber" return="java.lang.Number"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="ABSOLUTE" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="INCREMENT" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A Number that is either an absolute or an incremental amount.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.spi.MetricValue -->
|
|
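<!-- Illustrative usage sketch (hand-written, hedged): how a consumer of MetricValue
     might apply a value to a stored metric, distinguishing absolute readings from
     increments. Only the MetricValue(Number, boolean) constructor, the ABSOLUTE and
     INCREMENT fields, and isAbsolute()/isIncrement()/getNumber() listed above are
     assumed; the apply() helper is hypothetical.

       import org.apache.hadoop.metrics.spi.MetricValue;

       static long apply(long stored, MetricValue v) {
         // ABSOLUTE readings overwrite the stored value; INCREMENT readings add to it.
         if (v.isAbsolute()) {
           return v.getNumber().longValue();
         }
         return stored + v.getNumber().longValue();
       }

       // apply(0L,  new MetricValue(Integer.valueOf(42), MetricValue.ABSOLUTE))  == 42
       // apply(42L, new MetricValue(Integer.valueOf(5),  MetricValue.INCREMENT)) == 47
-->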
<!-- start class org.apache.hadoop.metrics.spi.NullContext -->
|
|
<class name="NullContext" extends="org.apache.hadoop.metrics.spi.AbstractMetricsContext"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="NullContext"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of NullContext]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="startMonitoring"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Do-nothing version of startMonitoring]]>
|
|
</doc>
|
|
</method>
|
|
<method name="emitRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<param name="outRec" type="org.apache.hadoop.metrics.spi.OutputRecord"/>
|
|
<doc>
|
|
<![CDATA[Do-nothing version of emitRecord]]>
|
|
</doc>
|
|
</method>
|
|
<method name="update"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="record" type="org.apache.hadoop.metrics.spi.MetricsRecordImpl"/>
|
|
<doc>
|
|
<![CDATA[Do-nothing version of update]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="record" type="org.apache.hadoop.metrics.spi.MetricsRecordImpl"/>
|
|
<doc>
|
|
<![CDATA[Do-nothing version of remove]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Null metrics context: a metrics context which does nothing. Used as the
|
|
default context, so that no performance data is emitted if no configuration
|
|
data is found.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.spi.NullContext -->
|
|
<!-- start class org.apache.hadoop.metrics.spi.NullContextWithUpdateThread -->
|
|
<class name="NullContextWithUpdateThread" extends="org.apache.hadoop.metrics.spi.AbstractMetricsContext"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="NullContextWithUpdateThread"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of NullContextWithUpdateThread]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="init"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="factory" type="org.apache.hadoop.metrics.ContextFactory"/>
|
|
</method>
|
|
<method name="emitRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="contextName" type="java.lang.String"/>
|
|
<param name="recordName" type="java.lang.String"/>
|
|
<param name="outRec" type="org.apache.hadoop.metrics.spi.OutputRecord"/>
|
|
<doc>
|
|
<![CDATA[Do-nothing version of emitRecord]]>
|
|
</doc>
|
|
</method>
|
|
<method name="update"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="record" type="org.apache.hadoop.metrics.spi.MetricsRecordImpl"/>
|
|
<doc>
|
|
<![CDATA[Do-nothing version of update]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="record" type="org.apache.hadoop.metrics.spi.MetricsRecordImpl"/>
|
|
<doc>
|
|
<![CDATA[Do-nothing version of remove]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A null context which has a thread that runs periodically
|
|
when monitoring is started. This keeps the data sampled
|
|
correctly.
|
|
In all other respects, this is like the NULL context: No data is emitted.
|
|
This is suitable for monitoring systems like JMX, which read the metrics
|
|
only when the data is requested through JMX.
|
|
|
|
The default implementations of start and stop monitoring
|
|
in AbstractMetricsContext are good enough.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.spi.NullContextWithUpdateThread -->
|
|
<!-- start class org.apache.hadoop.metrics.spi.OutputRecord -->
|
|
<class name="OutputRecord" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getTagNames" return="java.util.Set<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the set of tag names]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getTag" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns a tag object which can be a String, Integer, Short or Byte.
|
|
|
|
@return the tag value, or null if there is no such tag]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMetricNames" return="java.util.Set<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the set of metric names.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMetric" return="java.lang.Number"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns the metric object which can be a Float, Integer, Short or Byte.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Represents a record of metric data to be sent to a metrics system.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.spi.OutputRecord -->
|
|
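<!-- Illustrative usage sketch (hand-written, hedged): a metrics backend could walk an
     OutputRecord with the accessors listed above when emitting a record. The dump()
     helper and the use of System.out are hypothetical; only getTagNames(), getTag(),
     getMetricNames() and getMetric() are taken from the API description.

       import org.apache.hadoop.metrics.spi.OutputRecord;

       static void dump(String contextName, String recordName, OutputRecord outRec) {
         for (String tag : outRec.getTagNames()) {
           System.out.println(contextName + "." + recordName
               + " tag " + tag + "=" + outRec.getTag(tag));
         }
         for (String metric : outRec.getMetricNames()) {
           System.out.println(contextName + "." + recordName
               + " metric " + metric + "=" + outRec.getMetric(metric));
         }
       }
-->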
<!-- start class org.apache.hadoop.metrics.spi.Util -->
|
|
<class name="Util" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="parse" return="java.util.List<java.net.InetSocketAddress>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="specs" type="java.lang.String"/>
|
|
<param name="defaultPort" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parses a space and/or comma separated sequence of server specifications
|
|
of the form <i>hostname</i> or <i>hostname:port</i>. If
|
|
the specs string is null, defaults to localhost:defaultPort.
|
|
|
|
@return a list of InetSocketAddress objects.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Static utility methods]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.spi.Util -->
|
|
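<!-- Illustrative usage sketch (hand-written, hedged): parsing a server list with
     Util.parse as documented above. The host names and the port 8649 are placeholders.

       import java.net.InetSocketAddress;
       import java.util.List;
       import org.apache.hadoop.metrics.spi.Util;

       static List<InetSocketAddress> metricsServers() {
         // "ganglia1" has no port, so it gets the default port 8649;
         // a null specs string would yield localhost:8649.
         return Util.parse("ganglia1, ganglia2:8650", 8649);
       }
-->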
</package>
|
|
<package name="org.apache.hadoop.metrics.util">
|
|
<!-- start class org.apache.hadoop.metrics.util.MBeanUtil -->
|
|
<class name="MBeanUtil" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MBeanUtil"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="registerMBean" return="javax.management.ObjectName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="serviceName" type="java.lang.String"/>
|
|
<param name="nameName" type="java.lang.String"/>
|
|
<param name="theMbean" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Register the MBean using our standard MBeanName format
|
|
"hadoop.dfs:service=<serviceName>,name=<nameName>"
|
|
where <serviceName> and <nameName> are the supplied parameters.
|
|
|
|
@param serviceName
|
|
@param nameName
|
|
@param theMbean - the MBean to register
|
|
@return the name used to register the MBean]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unregisterMBean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="mbeanName" type="javax.management.ObjectName"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This util class provides a method to register an MBean using
|
|
our standard naming convention as described in the doc
|
|
for {@link #registerMBean(String, String, Object)}]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.util.MBeanUtil -->
|
|
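<!-- Illustrative usage sketch (hand-written, hedged): registering and unregistering a
     standard MBean with MBeanUtil. The SampleInfo/SampleInfoMBean pair and the
     "DataNode"/"SampleInfo" names are hypothetical; only registerMBean() and
     unregisterMBean() come from the API description above.

       import javax.management.ObjectName;
       import org.apache.hadoop.metrics.util.MBeanUtil;

       public interface SampleInfoMBean { int getPendingOps(); }

       public class SampleInfo implements SampleInfoMBean {
         private ObjectName beanName;

         public int getPendingOps() { return 42; }

         public void register() {
           // Registered as "hadoop.dfs:service=DataNode,name=SampleInfo".
           beanName = MBeanUtil.registerMBean("DataNode", "SampleInfo", this);
         }

         public void shutdown() {
           MBeanUtil.unregisterMBean(beanName);
         }
       }
-->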
<!-- start class org.apache.hadoop.metrics.util.MetricsIntValue -->
|
|
<class name="MetricsIntValue" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MetricsIntValue" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor - create a new metric
|
|
@param nam the name under which the metric is published]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newValue" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set the value
|
|
@param newValue]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get value
|
|
@return the value last set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="incr" type="int"/>
|
|
<doc>
|
|
<![CDATA[Increment the metric by the given value.
|
|
@param incr - value to be added]]>
|
|
</doc>
|
|
</method>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Increment the metric by one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="dec"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="decr" type="int"/>
|
|
<doc>
|
|
<![CDATA[Decrement the metric by the given value.
|
|
@param decr - value to subtract]]>
|
|
</doc>
|
|
</method>
|
|
<method name="dec"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Decrement the metric by one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="pushMetric"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="mr" type="org.apache.hadoop.metrics.MetricsRecord"/>
|
|
<doc>
|
|
<![CDATA[Push the metric to the mr.
|
|
The metric is pushed only if it was updated since the last push.
|
|
|
|
Note this does NOT push to JMX
|
|
(JMX gets the info via {@link #get()})
|
|
|
|
@param mr]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The MetricsIntValue class is for a metric that does not vary with time
|
|
but changes only when it is set.
|
|
Each time its value is set, it is published only *once* at the next update
|
|
call.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.util.MetricsIntValue -->
|
|
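<!-- Illustrative usage sketch (hand-written, hedged): typical use of MetricsIntValue from
     a daemon's periodic metrics update. The metric name "pendingBlocks" and the wrapper
     class are hypothetical; inc()/dec()/pushMetric() are the methods listed above.

       import org.apache.hadoop.metrics.MetricsRecord;
       import org.apache.hadoop.metrics.util.MetricsIntValue;

       class PendingBlocksMetrics {
         private final MetricsIntValue pendingBlocks = new MetricsIntValue("pendingBlocks");

         void blockQueued()          { pendingBlocks.inc(); }
         void blocksProcessed(int n) { pendingBlocks.dec(n); }

         // Invoked on each metrics interval; the value is pushed to the record
         // only if it changed since the last push, as described above.
         void doUpdates(MetricsRecord rec) {
           pendingBlocks.pushMetric(rec);
         }
       }
-->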
<!-- start class org.apache.hadoop.metrics.util.MetricsLongValue -->
|
|
<class name="MetricsLongValue" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MetricsLongValue" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor - create a new metric
|
|
@param nam the name under which the metric is published]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newValue" type="long"/>
|
|
<doc>
|
|
<![CDATA[Set the value
|
|
@param newValue]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get value
|
|
@return the value last set]]>
|
|
</doc>
|
|
</method>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="incr" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increment the metric by the given value.
|
|
@param incr - value to be added]]>
|
|
</doc>
|
|
</method>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Increment the metric by one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="dec"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="decr" type="long"/>
|
|
<doc>
|
|
<![CDATA[Decrement the metric by the given value.
|
|
@param decr - value to subtract]]>
|
|
</doc>
|
|
</method>
|
|
<method name="dec"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Decrement the metric by one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="pushMetric"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="mr" type="org.apache.hadoop.metrics.MetricsRecord"/>
|
|
<doc>
|
|
<![CDATA[Push the metric to the mr.
|
|
The metric is pushed only if it was updated since the last push.
|
|
|
|
Note this does NOT push to JMX
|
|
(JMX gets the info via {@link #get()})
|
|
|
|
@param mr]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The MetricsLongValue class is for a metric that does not vary with time
|
|
but changes only when it is set.
|
|
Each time its value is set, it is published only *once* at the next update
|
|
call.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.util.MetricsLongValue -->
|
|
<!-- start class org.apache.hadoop.metrics.util.MetricsTimeVaryingInt -->
|
|
<class name="MetricsTimeVaryingInt" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MetricsTimeVaryingInt" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor - create a new metric
|
|
@param nam the name under which the metric is published]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="incr" type="int"/>
|
|
<doc>
|
|
<![CDATA[Increment the metric by the given number of operations.
|
|
@param incr - number of operations]]>
|
|
</doc>
|
|
</method>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Increment the metric by one]]>
|
|
</doc>
|
|
</method>
|
|
<method name="pushMetric"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="mr" type="org.apache.hadoop.metrics.MetricsRecord"/>
|
|
<doc>
|
|
<![CDATA[Push the delta metrics to the mr.
|
|
The delta is since the last push/interval.
|
|
|
|
Note this does NOT push to JMX
|
|
(JMX gets the info via {@link #previousIntervalValue})
|
|
|
|
@param mr]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPreviousIntervalValue" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The value at the previous interval
|
|
@return prev interval value]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The MetricsTimeVaryingInt class is for a metric that naturally
|
|
varies over time (e.g. number of files created).
|
|
The metric is published at each interval heart beat (the interval
|
|
is set in the metrics config file).
|
|
Note that if one wants a time associated with the metric, then use
|
|
@see org.apache.hadoop.metrics.util.MetricsTimeVaryingRate]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.util.MetricsTimeVaryingInt -->
|
|
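<!-- Illustrative usage sketch (hand-written, hedged): counting operations with
     MetricsTimeVaryingInt. The metric name "filesCreated" and the wrapper class are
     hypothetical; inc(), pushMetric() and getPreviousIntervalValue() are the methods
     listed above.

       import org.apache.hadoop.metrics.MetricsRecord;
       import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;

       class FileCreationMetrics {
         private final MetricsTimeVaryingInt filesCreated =
             new MetricsTimeVaryingInt("filesCreated");

         void onFileCreated() { filesCreated.inc(); }

         // Per metrics interval: pushes the delta accumulated since the last push.
         void doUpdates(MetricsRecord rec) {
           filesCreated.pushMetric(rec);
           int lastInterval = filesCreated.getPreviousIntervalValue(); // e.g. read by JMX
         }
       }
-->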
<!-- start class org.apache.hadoop.metrics.util.MetricsTimeVaryingRate -->
|
|
<class name="MetricsTimeVaryingRate" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MetricsTimeVaryingRate" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor - create a new metric
|
|
@param n the name under which the metric is published]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="numOps" type="int"/>
|
|
<param name="time" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increment the metrics for numOps operations
|
|
@param numOps - number of operations
|
|
@param time - time for numOps operations]]>
|
|
</doc>
|
|
</method>
|
|
<method name="inc"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="time" type="long"/>
|
|
<doc>
|
|
<![CDATA[Increment the metrics for one operation
|
|
@param time for one operation]]>
|
|
</doc>
|
|
</method>
|
|
<method name="pushMetric"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="mr" type="org.apache.hadoop.metrics.MetricsRecord"/>
|
|
<doc>
|
|
<![CDATA[Push the delta metrics to the mr.
|
|
The delta is since the last push/interval.
|
|
|
|
Note this does NOT push to JMX
|
|
(JMX gets the info via {@link #getPreviousIntervalAverageTime()} and
|
|
{@link #getPreviousIntervalNumOps()})
|
|
|
|
@param mr]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPreviousIntervalNumOps" return="int"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The number of operations in the previous interval
|
|
@return - ops in prev interval]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPreviousIntervalAverageTime" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The average rate of an operation in the previous interval
|
|
@return - the average rate.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMinTime" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The min time for a single operation since the last reset
|
|
{@link #resetMinMax()}
|
|
@return min time for an operation]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMaxTime" return="long"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The max time for a single operation since the last reset
|
|
{@link #resetMinMax()}
|
|
@return max time for an operation]]>
|
|
</doc>
|
|
</method>
|
|
<method name="resetMinMax"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Reset the min max values]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The MetricsTimeVaryingRate class is for a rate based metric that
|
|
naturally varies over time (e.g. time taken to create a file).
|
|
The rate is averaged at each interval heart beat (the interval
|
|
is set in the metrics config file).
|
|
This class also keeps track of the min and max rates along with
|
|
a method to reset the min-max.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.metrics.util.MetricsTimeVaryingRate -->
|
|
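<!-- Illustrative usage sketch (hand-written, hedged): timing an operation with
     MetricsTimeVaryingRate. The metric name "createFile" and the timed block are
     hypothetical; inc(long) and pushMetric() are the methods listed above.

       import org.apache.hadoop.metrics.MetricsRecord;
       import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;

       class CreateFileTimer {
         private final MetricsTimeVaryingRate createFile =
             new MetricsTimeVaryingRate("createFile");

         void timedCreate() {
           long start = System.currentTimeMillis();
           // ... perform the operation being measured ...
           createFile.inc(System.currentTimeMillis() - start); // one op, elapsed time
         }

         // Per metrics interval: averages the rate over the interval and pushes it.
         void doUpdates(MetricsRecord rec) {
           createFile.pushMetric(rec);
         }
       }
-->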
</package>
|
|
<package name="org.apache.hadoop.net">
|
|
<!-- start class org.apache.hadoop.net.CachedDNSToSwitchMapping -->
|
|
<class name="CachedDNSToSwitchMapping" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.net.DNSToSwitchMapping"/>
|
|
<constructor name="CachedDNSToSwitchMapping" type="org.apache.hadoop.net.DNSToSwitchMapping"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="resolve" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="names" type="java.util.List<java.lang.String>"/>
|
|
</method>
|
|
<field name="rawMapping" type="org.apache.hadoop.net.DNSToSwitchMapping"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A cached implementation of DNSToSwitchMapping that takes a
|
|
raw DNSToSwitchMapping and stores the resolved network location in
|
|
a cache. Subsequent calls for a resolved network location
|
|
will get its location from the cache.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.CachedDNSToSwitchMapping -->
|
|
<!-- start class org.apache.hadoop.net.DNS -->
|
|
<class name="DNS" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DNS"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="reverseDns" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="hostIp" type="java.net.InetAddress"/>
|
|
<param name="ns" type="java.lang.String"/>
|
|
<exception name="NamingException" type="javax.naming.NamingException"/>
|
|
<doc>
|
|
<![CDATA[Returns the hostname associated with the specified IP address by the
|
|
provided nameserver.
|
|
|
|
@param hostIp
|
|
The address to reverse lookup
|
|
@param ns
|
|
The host name of a reachable DNS server
|
|
@return The host name associated with the provided IP
|
|
@throws NamingException
|
|
If a NamingException is encountered]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getIPs" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="strInterface" type="java.lang.String"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
<doc>
|
|
<![CDATA[Returns all the IPs associated with the provided interface, if any, in
|
|
textual form.
|
|
|
|
@param strInterface
|
|
The name of the network interface to query (e.g. eth0)
|
|
@return A string vector of all the IPs associated with the provided
|
|
interface
|
|
@throws UnknownHostException
|
|
If an UnknownHostException is encountered in querying the
|
|
default interface]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultIP" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="strInterface" type="java.lang.String"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
<doc>
|
|
<![CDATA[Returns the first available IP address associated with the provided
|
|
network interface
|
|
|
|
@param strInterface
|
|
The name of the network interface to query (e.g. eth0)
|
|
@return The IP address in text form
|
|
@throws UnknownHostException
|
|
If one is encountered in querying the default interface]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHosts" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="strInterface" type="java.lang.String"/>
|
|
<param name="nameserver" type="java.lang.String"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
<doc>
|
|
<![CDATA[Returns all the host names associated by the provided nameserver with the
|
|
address bound to the specified network interface
|
|
|
|
@param strInterface
|
|
The name of the network interface to query (e.g. eth0)
|
|
@param nameserver
|
|
The DNS host name
|
|
@return A string vector of all host names associated with the IPs tied to
|
|
the specified interface
|
|
@throws UnknownHostException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHosts" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="strInterface" type="java.lang.String"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
<doc>
|
|
<![CDATA[Returns all the host names associated by the default nameserver with the
|
|
address bound to the specified network interface
|
|
|
|
@param strInterface
|
|
The name of the network interface to query (e.g. eth0)
|
|
@return The list of host names associated with IPs bound to the network
|
|
interface
|
|
@throws UnknownHostException
|
|
If one is encountered while querying the default interface]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultHost" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="strInterface" type="java.lang.String"/>
|
|
<param name="nameserver" type="java.lang.String"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
<doc>
|
|
<![CDATA[Returns the default (first) host name associated by the provided
|
|
nameserver with the address bound to the specified network interface
|
|
|
|
@param strInterface
|
|
The name of the network interface to query (e.g. eth0)
|
|
@param nameserver
|
|
The DNS host name
|
|
@return The default host name associated with IPs bound to the network
|
|
interface
|
|
@throws UnknownHostException
|
|
If one is encountered while querying the default interface]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultHost" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="strInterface" type="java.lang.String"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
<doc>
|
|
<![CDATA[Returns the default (first) host name associated by the default
|
|
nameserver with the address bound to the specified network interface
|
|
|
|
@param strInterface
|
|
The name of the network interface to query (e.g. eth0)
|
|
@return The default host name associated with IPs bound to the network
|
|
interface
|
|
@throws UnknownHostException
|
|
If one is encountered while querying the default interface]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A class that provides direct and reverse lookup functionalities, allowing
|
|
the querying of specific network interfaces or nameservers.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.DNS -->
|
|
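<!-- Illustrative usage sketch (hand-written, hedged): querying a network interface with
     the DNS helper. The interface name "eth0", the address "10.0.0.1" and the nameserver
     "ns1.example.com" are placeholders; getDefaultHost(), getIPs() and reverseDns() are
     the static methods listed above.

       import java.net.InetAddress;
       import java.net.UnknownHostException;
       import javax.naming.NamingException;
       import org.apache.hadoop.net.DNS;

       static void probe() throws UnknownHostException, NamingException {
         String host = DNS.getDefaultHost("eth0");  // first host name bound to eth0
         String[] ips = DNS.getIPs("eth0");         // all addresses bound to eth0
         String name = DNS.reverseDns(              // reverse lookup via an explicit nameserver
             InetAddress.getByName("10.0.0.1"), "ns1.example.com");
       }
-->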
<!-- start interface org.apache.hadoop.net.DNSToSwitchMapping -->
|
|
<interface name="DNSToSwitchMapping" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="resolve" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="names" type="java.util.List<java.lang.String>"/>
|
|
<doc>
|
|
<![CDATA[Resolves a list of DNS-names/IP-addresses and returns a list of
|
|
switch information (network paths). One-to-one correspondence must be
|
|
maintained between the elements in the lists.
|
|
Consider an element in the argument list - x.y.com. The switch information
|
|
that is returned must be a network path of the form /foo/rack,
|
|
where / is the root, and 'foo' is the switch where 'rack' is connected.
|
|
Note the hostname/ip-address is not part of the returned path.
|
|
The network topology of the cluster would determine the number of
|
|
components in the network path.
|
|
@param names
|
|
@return list of resolved network paths]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An interface that should be implemented to allow pluggable
|
|
DNS-name/IP-address to RackID resolvers.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.net.DNSToSwitchMapping -->
|
|
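<!-- Illustrative sketch (hand-written, hedged): a minimal DNSToSwitchMapping that places
     every host in the same rack. The class and the rack path "/default-rack" are
     hypothetical examples of the /foo/rack convention described above; only the
     resolve() signature comes from the API description.

       import java.util.ArrayList;
       import java.util.List;
       import org.apache.hadoop.net.DNSToSwitchMapping;

       public class SingleRackMapping implements DNSToSwitchMapping {
         public List<String> resolve(List<String> names) {
           // One network path per input name, in the same order (one-to-one).
           List<String> paths = new ArrayList<String>(names.size());
           for (int i = 0; i < names.size(); i++) {
             paths.add("/default-rack");
           }
           return paths;
         }
       }
-->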
<!-- start class org.apache.hadoop.net.NetUtils -->
|
|
<class name="NetUtils" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="NetUtils"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getSocketFactory" return="javax.net.SocketFactory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="clazz" type="java.lang.Class<?>"/>
|
|
<doc>
|
|
<![CDATA[Get the socket factory for the given class according to its
|
|
configuration parameter
|
|
<tt>hadoop.rpc.socket.factory.class.<ClassName></tt>. When no
|
|
such parameter exists then fall back on the default socket factory as
|
|
configured by <tt>hadoop.rpc.socket.factory.class.default</tt>. If
|
|
this default socket factory is not configured, then fall back on the JVM
|
|
default socket factory.
|
|
|
|
@param conf the configuration
|
|
@param clazz the class (usually a {@link VersionedProtocol})
|
|
@return a socket factory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDefaultSocketFactory" return="javax.net.SocketFactory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the default socket factory as specified by the configuration
|
|
parameter <tt>hadoop.rpc.socket.factory.class.default</tt>
|
|
|
|
@param conf the configuration
|
|
@return the default socket factory as specified in the configuration or
|
|
the JVM default socket factory if the configuration does not
|
|
contain a default socket factory property.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getSocketFactoryFromProperty" return="javax.net.SocketFactory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="propValue" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get the socket factory corresponding to the given proxy URI. If the
|
|
given proxy URI corresponds to an absence of configuration parameter,
|
|
returns null. If the URI is malformed, an exception is raised.
|
|
|
|
@param propValue the property which is the class name of the
|
|
SocketFactory to instantiate; assumed non null and non empty.
|
|
@return a socket factory as defined in the property value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createSocketAddr" return="java.net.InetSocketAddress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="target" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Util method to build socket addr from either:
|
|
<host>:<port>
|
|
<fs>://<host>:<port>/<path>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="createSocketAddr" return="java.net.InetSocketAddress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="target" type="java.lang.String"/>
|
|
<param name="defaultPort" type="int"/>
|
|
<doc>
|
|
<![CDATA[Util method to build socket addr from either:
|
|
<host>
|
|
<host>:<port>
|
|
<fs>://<host>:<port>/<path>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getServerAddress" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="oldBindAddressName" type="java.lang.String"/>
|
|
<param name="oldPortName" type="java.lang.String"/>
|
|
<param name="newBindAddressName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Handle the transition from pairs of attributes specifying a host and port
|
|
to a single colon separated one.
|
|
@param conf the configuration to check
|
|
@param oldBindAddressName the old address attribute name
|
|
@param oldPortName the old port attribute name
|
|
@param newBindAddressName the new combined name
|
|
@return the complete address from the configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addStaticResolution"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="host" type="java.lang.String"/>
|
|
<param name="resolvedName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Adds a static resolution for host. This can be used for setting up
|
|
fake hostnames that point to a well-known host. For example,
|
|
in some test cases we need daemons with different hostnames
|
|
running on the same machine. In order to create connections to these
|
|
daemons, one can set up mappings from those hostnames to "localhost".
|
|
{@link NetUtils#getStaticResolution(String)} can be used to query for
|
|
the actual hostname.
|
|
@param host
|
|
@param resolvedName]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStaticResolution" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="host" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Retrieves the resolved name for the passed host. The resolved name must
|
|
have been set earlier using
|
|
{@link NetUtils#addStaticResolution(String, String)}
|
|
@param host
|
|
@return the resolution]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getAllStaticResolutions" return="java.util.List<java.lang.String[]>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This is used to get all the resolutions that were added using
|
|
{@link NetUtils#addStaticResolution(String, String)}. The return
|
|
value is a List, each element of which is an array of Strings
|
|
of the form String[0]=hostname, String[1]=resolved-hostname
|
|
@return the list of resolutions]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getConnectAddress" return="java.net.InetSocketAddress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="server" type="org.apache.hadoop.ipc.Server"/>
|
|
<doc>
|
|
<![CDATA[Returns InetSocketAddress that a client can use to
|
|
connect to the server. Server.getListenerAddress() is not correct when
|
|
the server binds to "0.0.0.0". This returns "127.0.0.1:port" when
|
|
the getListenerAddress() returns "0.0.0.0:port".
|
|
|
|
@param server
|
|
@return socket address that a client can use to connect to the server.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInputStream" return="java.io.InputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="socket" type="java.net.Socket"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Same as getInputStream(socket, socket.getSoTimeout()).<br><br>
|
|
|
|
From documentation for {@link #getInputStream(Socket, long)}:<br>
|
|
Returns InputStream for the socket. If the socket has an associated
|
|
SocketChannel then it returns a
|
|
{@link SocketInputStream} with the given timeout. If the socket does not
|
|
have a channel, {@link Socket#getInputStream()} is returned. In the latter
|
|
case, the timeout argument is ignored and the timeout set with
|
|
{@link Socket#setSoTimeout(int)} applies for reads.<br><br>
|
|
|
|
Any socket created using socket factories returned by {@link NetUtils},
|
|
must use this interface instead of {@link Socket#getInputStream()}.
|
|
|
|
@see #getInputStream(Socket, long)
|
|
|
|
@param socket
|
|
@return InputStream for reading from the socket.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getInputStream" return="java.io.InputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="socket" type="java.net.Socket"/>
|
|
<param name="timeout" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns InputStream for the socket. If the socket has an associated
|
|
SocketChannel then it returns a
|
|
{@link SocketInputStream} with the given timeout. If the socket does not
|
|
have a channel, {@link Socket#getInputStream()} is returned. In the latter
|
|
case, the timeout argument is ignored and the timeout set with
|
|
{@link Socket#setSoTimeout(int)} applies for reads.<br><br>
|
|
|
|
Any socket created using socket factories returned by {@link NetUtils},
|
|
must use this interface instead of {@link Socket#getInputStream()}.
|
|
|
|
@see Socket#getChannel()
|
|
|
|
@param socket
|
|
@param timeout timeout in milliseconds. This may not always apply. Zero
|
|
for waiting as long as necessary.
|
|
@return InputStream for reading from the socket.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputStream" return="java.io.OutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="socket" type="java.net.Socket"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Same as getOutputStream(socket, 0). Timeout of zero implies write will
|
|
wait until data is available.<br><br>
|
|
|
|
From documentation for {@link #getOutputStream(Socket, long)} : <br>
|
|
Returns OutputStream for the socket. If the socket has an associated
|
|
SocketChannel then it returns a
|
|
{@link SocketOutputStream} with the given timeout. If the socket does not
|
|
have a channel, {@link Socket#getOutputStream()} is returned. In the latter
|
|
case, the timeout argument is ignored and the write will wait until
|
|
data is available.<br><br>
|
|
|
|
Any socket created using socket factories returned by {@link NetUtils},
|
|
must use this interface instead of {@link Socket#getOutputStream()}.
|
|
|
|
@see #getOutputStream(Socket, long)
|
|
|
|
@param socket
|
|
@return OutputStream for writing to the socket.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getOutputStream" return="java.io.OutputStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="socket" type="java.net.Socket"/>
|
|
<param name="timeout" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Returns OutputStream for the socket. If the socket has an associated
|
|
SocketChannel then it returns a
|
|
{@link SocketOutputStream} with the given timeout. If the socket does not
|
|
have a channel, {@link Socket#getOutputStream()} is returned. In the latter
|
|
case, the timeout argument is ignored and the write will wait until
|
|
data is available.<br><br>
|
|
|
|
Any socket created using socket factories returned by {@link NetUtils},
|
|
must use this interface instead of {@link Socket#getOutputStream()}.
|
|
|
|
@see Socket#getChannel()
|
|
|
|
@param socket
|
|
@param timeout timeout in milliseconds. This may not always apply. Zero
|
|
for waiting as long as necessary.
|
|
@return OutputStream for writing to the socket.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="normalizeHostName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Given a string representation of a host, return its IP address
|
|
in textual representation.
|
|
|
|
@param name a string representation of a host:
|
|
either a textual representation of its IP address or its host name
|
|
@return its IP address in the string format]]>
|
|
</doc>
|
|
</method>
|
|
<method name="normalizeHostNames" return="java.util.List<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="names" type="java.util.Collection<java.lang.String>"/>
|
|
<doc>
|
|
<![CDATA[Given a collection of string representation of hosts, return a list of
|
|
corresponding IP addresses in the textual representation.
|
|
|
|
@param names a collection of string representations of hosts
|
|
@return a list of corresponding IP addresses in the string format
|
|
@see #normalizeHostName(String)]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.NetUtils -->
|
|
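<!-- Illustrative usage sketch (hand-written, hedged): building a socket address, obtaining
     a socket factory and wrapping the socket streams with NetUtils. The host name
     "namenode.example.com", port 8020 and the 10 second timeout are placeholders;
     createSocketAddr(), getDefaultSocketFactory() and getInputStream() are the methods
     listed above.

       import java.io.IOException;
       import java.io.InputStream;
       import java.net.InetSocketAddress;
       import java.net.Socket;
       import javax.net.SocketFactory;
       import org.apache.hadoop.conf.Configuration;
       import org.apache.hadoop.net.NetUtils;

       static void connect() throws IOException {
         Configuration conf = new Configuration();

         // No port in the target string, so the default port 8020 is used.
         InetSocketAddress addr = NetUtils.createSocketAddr("namenode.example.com", 8020);

         SocketFactory factory = NetUtils.getDefaultSocketFactory(conf);
         Socket socket = factory.createSocket(addr.getAddress(), addr.getPort());

         // Use the NetUtils stream wrappers rather than Socket#getInputStream().
         InputStream in = NetUtils.getInputStream(socket, 10000L);
         in.close();
         socket.close();
       }
-->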
<!-- start class org.apache.hadoop.net.NetworkTopology -->
|
|
<class name="NetworkTopology" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="NetworkTopology"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="add"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="node" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Add a leaf node
|
|
Update node counter & rack counter if necessary
|
|
@param node
|
|
node to be added
|
|
@exception IllegalArgumentException if a node is added to a leaf node
|
|
or the node to be added is not a leaf]]>
|
|
</doc>
|
|
</method>
|
|
<method name="remove"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="node" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Remove a node
|
|
Update node counter & rack counter if necessary
|
|
@param node
|
|
node to be removed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="contains" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="node" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Check if the tree contains node <i>node</i>
|
|
|
|
@param node
|
|
a node
|
|
@return true if <i>node</i> is already in the tree; false otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNode" return="org.apache.hadoop.net.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="loc" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Given a string representation of a node, return its reference
|
|
|
|
@param loc
|
|
a path-like string representation of a node
|
|
@return a reference to the node; null if the node is not in the tree]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumOfRacks" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the total number of racks]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNumOfLeaves" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the total number of nodes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getDistance" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="node1" type="org.apache.hadoop.net.Node"/>
|
|
<param name="node2" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Return the distance between two nodes
|
|
It is assumed that the distance from one node to its parent is 1
|
|
The distance between two nodes is calculated by summing up their distances
|
|
to their closest common ancestor.
|
|
@param node1 one node
|
|
@param node2 another node
|
|
@return the distance between node1 and node2
|
|
(the behavior is unspecified if node1 or node2 do not belong to the cluster)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isOnSameRack" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="node1" type="org.apache.hadoop.net.Node"/>
|
|
<param name="node2" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Check if two nodes are on the same rack
|
|
@param node1 one node
|
|
@param node2 another node
|
|
@return true if node1 and node2 are on the same rack; false otherwise
|
|
@exception IllegalArgumentException when either node1 or node2 is null, or
|
|
node1 or node2 do not belong to the cluster]]>
|
|
</doc>
|
|
</method>
|
|
<method name="chooseRandom" return="org.apache.hadoop.net.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="scope" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Randomly choose one node from <i>scope</i>.
|
|
If scope starts with ~, choose one from all nodes except for the
|
|
ones in <i>scope</i>; otherwise, choose one from <i>scope</i>
|
|
@param scope range of nodes from which a node will be chosen
|
|
@return the chosen node]]>
|
|
</doc>
|
|
</method>
|
|
<method name="countNumOfAvailableNodes" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="scope" type="java.lang.String"/>
|
|
<param name="excludedNodes" type="java.util.List<org.apache.hadoop.net.Node>"/>
|
|
<doc>
|
|
<![CDATA[Return the number of leaves in <i>scope</i> but not in <i>excludedNodes</i>.
|
|
If scope starts with ~, return the number of nodes that are not
|
|
in <i>scope</i> and <i>excludedNodes</i>;
|
|
@param scope a path string that may start with ~
|
|
@param excludedNodes a list of nodes
|
|
@return number of available nodes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[convert a network tree to a string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="pseudoSortByDistance"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="reader" type="org.apache.hadoop.net.Node"/>
|
|
<param name="nodes" type="org.apache.hadoop.net.Node[]"/>
|
|
<doc>
|
|
<![CDATA[Sort nodes array by their distances to <i>reader</i>
|
|
It linearly scans the array; if a local node is found, it is swapped with
|
|
the first element of the array.
|
|
If a local rack node is found, it is swapped with the first element following
|
|
the local node.
|
|
If neither a local node nor a local rack node is found, a random replica
|
|
location is placed at position 0.
|
|
It leaves the rest of the nodes untouched.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="DEFAULT_RACK" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DEFAULT_HOST_LEVEL" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[The class represents a cluster of computers with a hierarchical, tree-like
|
|
network topology.
|
|
For example, a cluster may consist of many data centers filled
|
|
with racks of computers.
|
|
In a network topology, leaves represent data nodes (computers) and inner
|
|
nodes represent switches/routers that manage traffic in/out of data centers
|
|
or racks.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.NetworkTopology -->
|
|
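<!-- Illustrative usage sketch (hand-written, hedged): building a small topology and
     querying it. The host names and rack locations are made up; NodeBase (described
     further below) is used as the Node implementation, and add(), isOnSameRack(),
     getDistance() and chooseRandom() are the methods listed above.

       import org.apache.hadoop.net.NetworkTopology;
       import org.apache.hadoop.net.Node;
       import org.apache.hadoop.net.NodeBase;

       static void sketch() {
         NetworkTopology topology = new NetworkTopology();
         Node h1 = new NodeBase("h1:50010", "/dc1/rack1");
         Node h2 = new NodeBase("h2:50010", "/dc1/rack1");
         Node h3 = new NodeBase("h3:50010", "/dc1/rack2");
         topology.add(h1);
         topology.add(h2);
         topology.add(h3);

         boolean sameRack  = topology.isOnSameRack(h1, h2);  // true
         boolean otherRack = topology.isOnSameRack(h1, h3);  // false
         int near = topology.getDistance(h1, h2);            // 2: both one hop from the shared rack
         int far  = topology.getDistance(h1, h3);            // 4: closest common ancestor is /dc1
         Node any = topology.chooseRandom("/dc1");           // some leaf under /dc1
       }
-->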
<!-- start interface org.apache.hadoop.net.Node -->
|
|
<interface name="Node" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getNetworkLocation" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the string representation of this node's network location]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setNetworkLocation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="location" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set the node's network location]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getParent" return="org.apache.hadoop.net.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's parent]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setParent"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="parent" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Set this node's parent]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLevel" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's level in the tree.
|
|
E.g. the root of a tree returns 0 and its children return 1]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setLevel"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set this node's level in the tree.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[The interface defines a node in a network topology.
|
|
A node may be a leaf representing a data node or an inner
|
|
node representing a datacenter or rack.
|
|
Each node has a name and its location in the network is
|
|
decided by a string with syntax similar to a file name.
|
|
For example, a data node's name is hostname:port# and if it's located at
|
|
rack "orange" in datacenter "dog", the string representation of its
|
|
network location is /dog/orange]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.net.Node -->
|
|
<!-- start class org.apache.hadoop.net.NodeBase -->
|
|
<class name="NodeBase" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.net.Node"/>
|
|
<constructor name="NodeBase"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="NodeBase" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a node from its path
|
|
@param path
|
|
a concatenation of this node's location, the path separator, and its name]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="NodeBase" type="java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a node from its name and its location
|
|
@param name this node's name
|
|
@param location this node's location]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="NodeBase" type="java.lang.String, java.lang.String, org.apache.hadoop.net.Node, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a node from its name and its location
|
|
@param name this node's name
|
|
@param location this node's location
|
|
@param parent this node's parent node
|
|
@param level this node's level in the tree]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNetworkLocation" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's network location]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setNetworkLocation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="location" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Set this node's network location]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPath" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="node" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Return the given node's path]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's string representation]]>
|
|
</doc>
|
|
</method>
|
|
<method name="normalize" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="path" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Normalize a path]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getParent" return="org.apache.hadoop.net.Node"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's parent]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setParent"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="parent" type="org.apache.hadoop.net.Node"/>
|
|
<doc>
|
|
<![CDATA[Set this node's parent]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLevel" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return this node's level in the tree.
|
|
E.g. the root of a tree returns 0 and its children return 1]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setLevel"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="level" type="int"/>
|
|
<doc>
|
|
<![CDATA[Set this node's level in the tree]]>
|
|
</doc>
|
|
</method>
|
|
<field name="PATH_SEPARATOR" type="char"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="PATH_SEPARATOR_STR" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="ROOT" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="name" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="location" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="level" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="parent" type="org.apache.hadoop.net.Node"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A base class that implements interface Node]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.NodeBase -->
|
|
<!-- start class org.apache.hadoop.net.ScriptBasedMapping -->
|
|
<class name="ScriptBasedMapping" extends="org.apache.hadoop.net.CachedDNSToSwitchMapping"
|
|
abstract="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="ScriptBasedMapping"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="ScriptBasedMapping" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This class implements the {@link DNSToSwitchMapping} interface using a
|
|
script configured via topology.script.file.name.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.ScriptBasedMapping -->
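<!-- Usage sketch (editorial): wiring a topology script into ScriptBasedMapping via the
     topology.script.file.name key named above. The script path and host names are
     hypothetical; resolve() comes from the DNSToSwitchMapping interface mentioned above.

       import java.util.Arrays;
       import java.util.List;
       import org.apache.hadoop.conf.Configuration;
       import org.apache.hadoop.net.ScriptBasedMapping;

       Configuration conf = new Configuration();
       conf.set("topology.script.file.name", "/etc/hadoop/topology.sh");
       ScriptBasedMapping mapping = new ScriptBasedMapping(conf);
       List<String> racks = mapping.resolve(Arrays.asList("host1", "host2"));
       // each entry of racks is a network location such as "/dog/orange"
-->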
|
|
<!-- start class org.apache.hadoop.net.SocketInputStream -->
|
|
<class name="SocketInputStream" extends="java.io.InputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.nio.channels.ReadableByteChannel"/>
|
|
<constructor name="SocketInputStream" type="java.nio.channels.ReadableByteChannel, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a new input stream with the given timeout. If the timeout
|
|
is zero, it will be treated as infinite timeout. The socket's
|
|
channel will be configured to be non-blocking.
|
|
|
|
@param channel
|
|
Channel for reading, should also be a {@link SelectableChannel}.
|
|
The channel will be configured to be non-blocking.
|
|
@param timeout timeout in milliseconds. must not be negative.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SocketInputStream" type="java.net.Socket, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Same as SocketInputStream(socket.getChannel(), timeout): <br><br>
|
|
|
|
Create a new input stream with the given timeout. If the timeout
|
|
is zero, it will be treated as infinite timeout. The socket's
|
|
channel will be configured to be non-blocking.
|
|
|
|
@see SocketInputStream#SocketInputStream(ReadableByteChannel, long)
|
|
|
|
@param socket should have a channel associated with it.
|
|
@param timeout timeout in milliseconds. Must not be negative.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SocketInputStream" type="java.net.Socket"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Same as SocketInputStream(socket.getChannel(), socket.getSoTimeout())
|
|
:<br><br>
|
|
|
|
Create a new input stream with the given timeout. If the timeout
|
|
is zero, it will be treated as infinite timeout. The socket's
|
|
channel will be configured to be non-blocking.
|
|
@see SocketInputStream#SocketInputStream(ReadableByteChannel, long)
|
|
|
|
@param socket should have a channel associated with it.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getChannel" return="java.nio.channels.ReadableByteChannel"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the underlying channel used by this input stream.
|
|
This is useful in certain cases like channel for
|
|
{@link FileChannel#transferFrom(ReadableByteChannel, long, long)}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isOpen" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="read" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dst" type="java.nio.ByteBuffer"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="waitForReadable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Waits for the underlying channel to be ready for reading.
|
|
The timeout specified for this stream applies to this wait.
|
|
|
|
@throws SocketTimeoutException
|
|
if select on the channel times out.
|
|
@throws IOException
|
|
if any other I/O error occurs.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This implements an input stream that can have a timeout while reading.
|
|
This sets non-blocking flag on the socket channel.
|
|
So after creating this object, read() on
|
|
{@link Socket#getInputStream()} and write() on
|
|
{@link Socket#getOutputStream()} for the associated socket will throw
|
|
IllegalBlockingModeException.
|
|
Please use {@link SocketOutputStream} for writing.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.SocketInputStream -->
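<!-- Usage sketch (editorial): a read with a timeout, as described by the class and
     constructor documentation above. The address and timeout value are hypothetical;
     the socket must have an associated channel, so it is opened via SocketChannel.

       import java.io.InputStream;
       import java.net.InetSocketAddress;
       import java.net.Socket;
       import java.nio.channels.SocketChannel;
       import org.apache.hadoop.net.SocketInputStream;

       Socket socket = SocketChannel.open(new InetSocketAddress("host1", 9000)).socket();
       InputStream in = new SocketInputStream(socket, 10000);  // 10 s; 0 would mean an infinite timeout
       int first = in.read();  // fails with a timeout exception if nothing arrives in time
-->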
|
|
<!-- start class org.apache.hadoop.net.SocketOutputStream -->
|
|
<class name="SocketOutputStream" extends="java.io.OutputStream"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.nio.channels.WritableByteChannel"/>
|
|
<constructor name="SocketOutputStream" type="java.nio.channels.WritableByteChannel, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a new output stream with the given timeout. If the timeout
|
|
is zero, it will be treated as infinite timeout. The socket's
|
|
channel will be configured to be non-blocking.
|
|
|
|
@param channel
|
|
Channel for writing, should also be a {@link SelectableChannel}.
|
|
The channel will be configured to be non-blocking.
|
|
@param timeout timeout in milliseconds. must not be negative.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SocketOutputStream" type="java.net.Socket, long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Same as SocketOutputStream(socket.getChannel(), timeout):<br><br>
|
|
|
|
Create a new output stream with the given timeout. If the timeout
|
|
is zero, it will be treated as infinite timeout. The socket's
|
|
channel will be configured to be non-blocking.
|
|
|
|
@see SocketOutputStream#SocketOutputStream(WritableByteChannel, long)
|
|
|
|
@param socket should have a channel associated with it.
|
|
@param timeout timeout in milliseconds. Must not be negative.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getChannel" return="java.nio.channels.WritableByteChannel"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the underlying channel used by this stream.
|
|
This is useful in certain cases like channel for
|
|
{@link FileChannel#transferTo(long, long, WritableByteChannel)}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isOpen" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="write" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="java.nio.ByteBuffer"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="waitForWritable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Waits for the underlying channel to be ready for writing.
|
|
The timeout specified for this stream applies to this wait.
|
|
|
|
@throws SocketTimeoutException
|
|
if select on the channel times out.
|
|
@throws IOException
|
|
if any other I/O error occurs.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="transferToFully"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fileCh" type="java.nio.channels.FileChannel"/>
|
|
<param name="position" type="long"/>
|
|
<param name="count" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Transfers data from FileChannel using
|
|
{@link FileChannel#transferTo(long, long, WritableByteChannel)}.
|
|
|
|
Similar to readFully(), this waits until the requested amount of
|
|
data is transferred.
|
|
|
|
@param fileCh FileChannel to transfer data from.
|
|
@param position position within the channel where the transfer begins
|
|
@param count number of bytes to transfer.
|
|
|
|
@throws EOFException
|
|
If the end of the input file is reached before the requested number of
|
|
bytes are transferred.
|
|
|
|
@throws SocketTimeoutException
|
|
If this channel blocks transfer longer than timeout for
|
|
this stream.
|
|
|
|
@throws IOException Includes any exception thrown by
|
|
{@link FileChannel#transferTo(long, long, WritableByteChannel)}.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This implements an output stream that can have a timeout while writing.
|
|
This sets non-blocking flag on the socket channel.
|
|
So after creating this object, read() on
|
|
{@link Socket#getInputStream()} and write() on
|
|
{@link Socket#getOutputStream()} on the associated socket will throw
|
|
IllegalBlockingModeException.
|
|
Please use {@link SocketInputStream} for reading.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.SocketOutputStream -->
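<!-- Usage sketch (editorial): sending a whole file over a socket with transferToFully(),
     which the documentation above describes as a readFully()-style wrapper around
     FileChannel.transferTo(). The file path is hypothetical and "socket" is assumed to be
     a connected java.net.Socket that has an associated channel.

       import java.io.FileInputStream;
       import java.nio.channels.FileChannel;
       import org.apache.hadoop.net.SocketOutputStream;

       SocketOutputStream out = new SocketOutputStream(socket, 10000);
       FileChannel fileCh = new FileInputStream("/data/block_0001").getChannel();
       out.transferToFully(fileCh, 0, (int) fileCh.size());  // waits until every byte is sent
       fileCh.close();
-->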
|
|
<!-- start class org.apache.hadoop.net.SocksSocketFactory -->
|
|
<class name="SocksSocketFactory" extends="javax.net.SocketFactory"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="SocksSocketFactory"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default empty constructor (for use with the reflection API).]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="SocksSocketFactory" type="java.net.Proxy"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor with a supplied Proxy
|
|
|
|
@param proxy the proxy to use to create sockets]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="addr" type="java.net.InetAddress"/>
|
|
<param name="port" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="addr" type="java.net.InetAddress"/>
|
|
<param name="port" type="int"/>
|
|
<param name="localHostAddr" type="java.net.InetAddress"/>
|
|
<param name="localPort" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="host" type="java.lang.String"/>
|
|
<param name="port" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="host" type="java.lang.String"/>
|
|
<param name="port" type="int"/>
|
|
<param name="localHostAddr" type="java.net.InetAddress"/>
|
|
<param name="localPort" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="obj" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Specialized SocketFactory to create sockets with a SOCKS proxy]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.SocksSocketFactory -->
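<!-- Usage sketch (editorial): creating sockets through a SOCKS proxy, per the class
     documentation above. The proxy and target addresses are hypothetical.

       import java.net.InetSocketAddress;
       import java.net.Proxy;
       import java.net.Socket;
       import org.apache.hadoop.net.SocksSocketFactory;

       Proxy proxy = new Proxy(Proxy.Type.SOCKS, new InetSocketAddress("socks.example.com", 1080));
       SocksSocketFactory factory = new SocksSocketFactory(proxy);
       Socket s = factory.createSocket("namenode.example.com", 8020);
-->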
|
|
<!-- start class org.apache.hadoop.net.StandardSocketFactory -->
|
|
<class name="StandardSocketFactory" extends="javax.net.SocketFactory"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="StandardSocketFactory"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default empty constructor (for use with the reflection API).]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="addr" type="java.net.InetAddress"/>
|
|
<param name="port" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="addr" type="java.net.InetAddress"/>
|
|
<param name="port" type="int"/>
|
|
<param name="localHostAddr" type="java.net.InetAddress"/>
|
|
<param name="localPort" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="host" type="java.lang.String"/>
|
|
<param name="port" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
</method>
|
|
<method name="createSocket" return="java.net.Socket"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="host" type="java.lang.String"/>
|
|
<param name="port" type="int"/>
|
|
<param name="localHostAddr" type="java.net.InetAddress"/>
|
|
<param name="localPort" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<exception name="UnknownHostException" type="java.net.UnknownHostException"/>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="obj" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Specialized SocketFactory to create standard (non-proxied) sockets]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.net.StandardSocketFactory -->
|
|
</package>
|
|
<package name="org.apache.hadoop.record">
|
|
<!-- start class org.apache.hadoop.record.BinaryRecordInput -->
|
|
<class name="BinaryRecordInput" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.RecordInput"/>
|
|
<constructor name="BinaryRecordInput" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of BinaryRecordInput]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="BinaryRecordInput" type="java.io.DataInput"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of BinaryRecordInput]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="get" return="org.apache.hadoop.record.BinaryRecordInput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inp" type="java.io.DataInput"/>
|
|
<doc>
|
|
<![CDATA[Get a thread-local record input for the supplied DataInput.
|
|
@param inp data input stream
|
|
@return binary record input corresponding to the supplied DataInput.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readByte" return="byte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readBool" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFloat" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readDouble" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readBuffer" return="org.apache.hadoop.record.Buffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startVector" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startMap" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.BinaryRecordInput -->
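<!-- Usage sketch (editorial): reading primitive fields through the thread-local
     BinaryRecordInput returned by get(), as described above. The file name and field
     tags are hypothetical; tags matter only for tagged serialization formats such as XML.

       import java.io.DataInputStream;
       import java.io.FileInputStream;
       import org.apache.hadoop.record.BinaryRecordInput;

       DataInputStream din = new DataInputStream(new FileInputStream("record.bin"));
       BinaryRecordInput rin = BinaryRecordInput.get(din);
       int count = rin.readInt("count");
       String name = rin.readString("name");
-->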
|
|
<!-- start class org.apache.hadoop.record.BinaryRecordOutput -->
|
|
<class name="BinaryRecordOutput" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.RecordOutput"/>
|
|
<constructor name="BinaryRecordOutput" type="java.io.OutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of BinaryRecordOutput]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="BinaryRecordOutput" type="java.io.DataOutput"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of BinaryRecordOutput]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="get" return="org.apache.hadoop.record.BinaryRecordOutput"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<doc>
|
|
<![CDATA[Get a thread-local record output for the supplied DataOutput.
|
|
@param out data output stream
|
|
@return binary record output corresponding to the supplied DataOutput.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeByte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeBool"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="boolean"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeInt"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeLong"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="l" type="long"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeFloat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="float"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeDouble"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="d" type="double"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeString"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="java.lang.String"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="org.apache.hadoop.record.Buffer"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.BinaryRecordOutput -->
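<!-- Usage sketch (editorial): the writing counterpart of the BinaryRecordInput example
     above, using the thread-local BinaryRecordOutput returned by get(). The file name
     and values are hypothetical.

       import java.io.DataOutputStream;
       import java.io.FileOutputStream;
       import org.apache.hadoop.record.BinaryRecordOutput;

       DataOutputStream dout = new DataOutputStream(new FileOutputStream("record.bin"));
       BinaryRecordOutput rout = BinaryRecordOutput.get(dout);
       rout.writeInt(42, "count");
       rout.writeString("example", "name");
       dout.flush();
-->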
|
|
<!-- start class org.apache.hadoop.record.Buffer -->
|
|
<class name="Buffer" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Comparable"/>
|
|
<implements name="java.lang.Cloneable"/>
|
|
<constructor name="Buffer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a zero-count sequence.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Buffer" type="byte[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a Buffer using the byte array as the initial value.
|
|
|
|
@param bytes This array becomes the backing storage for the object.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Buffer" type="byte[], int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a Buffer using the byte range as the initial value.
|
|
|
|
@param bytes Copy of this array becomes the backing storage for the object.
|
|
@param offset offset into byte array
|
|
@param length length of data]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<doc>
|
|
<![CDATA[Use the specified byte array as the underlying sequence.
|
|
|
|
@param bytes byte sequence]]>
|
|
</doc>
|
|
</method>
|
|
<method name="copy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Copy the specified byte array to the Buffer. Replaces the current buffer.
|
|
|
|
@param bytes byte array to be assigned
|
|
@param offset offset into byte array
|
|
@param length length of data]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the data from the Buffer.
|
|
|
|
@return The data is only valid between 0 and getCount() - 1.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCount" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the current count of the buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCapacity" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the capacity, which is the maximum count that could be handled without
|
|
resizing the backing storage.
|
|
|
|
@return The number of bytes]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCapacity"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newCapacity" type="int"/>
|
|
<doc>
|
|
<![CDATA[Change the capacity of the backing storage.
|
|
The data is preserved if newCapacity >= getCount().
|
|
@param newCapacity The new capacity in bytes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Reset the buffer to 0 size]]>
|
|
</doc>
|
|
</method>
|
|
<method name="truncate"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Change the capacity of the backing store to be the same as the current
|
|
count of buffer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="length" type="int"/>
|
|
<doc>
|
|
<![CDATA[Append specified bytes to the buffer.
|
|
|
|
@param bytes byte array to be appended
|
|
@param offset offset into byte array
|
|
@param length length of data]]>
|
|
</doc>
|
|
</method>
|
|
<method name="append"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<doc>
|
|
<![CDATA[Append specified bytes to the buffer
|
|
|
|
@param bytes byte array to be appended]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Define the sort order of the Buffer.
|
|
|
|
@param other The other buffer
|
|
@return Positive if this is bigger than other, 0 if they are equal, and
|
|
negative if this is smaller than other.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="charsetName" type="java.lang.String"/>
|
|
<exception name="UnsupportedEncodingException" type="java.io.UnsupportedEncodingException"/>
|
|
<doc>
|
|
<![CDATA[Convert the byte buffer to a string using a specific character encoding
|
|
|
|
@param charsetName Valid Java Character Set Name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="clone" return="java.lang.Object"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="CloneNotSupportedException" type="java.lang.CloneNotSupportedException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A byte sequence that is used as a Java native type for buffer.
|
|
It is resizable and distinguishes between the count of the sequence and
|
|
the current capacity.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.Buffer -->
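<!-- Usage sketch (editorial): the count/capacity distinction described in the Buffer
     documentation above. The byte values are hypothetical.

       import org.apache.hadoop.record.Buffer;

       Buffer buf = new Buffer(new byte[] {1, 2, 3});  // this array becomes the backing storage
       buf.append(new byte[] {4, 5});                  // count is now 5
       buf.setCapacity(64);                            // data kept because 64 >= getCount()
       buf.truncate();                                 // shrink capacity back to the current count
-->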
|
|
<!-- start class org.apache.hadoop.record.CsvRecordInput -->
|
|
<class name="CsvRecordInput" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.RecordInput"/>
|
|
<constructor name="CsvRecordInput" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of CsvRecordInput]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="readByte" return="byte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readBool" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFloat" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readDouble" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readBuffer" return="org.apache.hadoop.record.Buffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startVector" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startMap" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.CsvRecordInput -->
|
|
<!-- start class org.apache.hadoop.record.CsvRecordOutput -->
|
|
<class name="CsvRecordOutput" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.RecordOutput"/>
|
|
<constructor name="CsvRecordOutput" type="java.io.OutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of CsvRecordOutput]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="writeByte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeBool"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="boolean"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeInt"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeLong"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="l" type="long"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeFloat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="float"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeDouble"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="d" type="double"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeString"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="java.lang.String"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="org.apache.hadoop.record.Buffer"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.CsvRecordOutput -->
|
|
<!-- start interface org.apache.hadoop.record.Index -->
|
|
<interface name="Index" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="done" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="incr"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Interface that acts as an iterator for deserializing vectors and maps.
|
|
The deserializer returns an instance that the record uses to
|
|
read vectors and maps. An example of usage is as follows:
|
|
|
|
<code>
|
|
Index idx = startVector(...);
|
|
while (!idx.done()) {
|
|
.... // read element of a vector
|
|
idx.incr();
|
|
}
|
|
</code>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.record.Index -->
|
|
<!-- start class org.apache.hadoop.record.Record -->
|
|
<class name="Record" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.WritableComparable"/>
|
|
<implements name="java.lang.Cloneable"/>
|
|
<constructor name="Record"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="serialize"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rout" type="org.apache.hadoop.record.RecordOutput"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serialize a record with a tag (usually the field name)
|
|
@param rout Record output destination
|
|
@param tag record tag (Used only in tagged serialization e.g. XML)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="deserialize"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rin" type="org.apache.hadoop.record.RecordInput"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Deserialize a record with a tag (usually field name)
|
|
@param rin Record input source
|
|
@param tag Record tag (Used only in tagged serialization e.g. XML)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="peer" type="java.lang.Object"/>
|
|
<exception name="ClassCastException" type="java.lang.ClassCastException"/>
|
|
</method>
|
|
<method name="serialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rout" type="org.apache.hadoop.record.RecordOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serialize a record without a tag
|
|
@param rout Record output destination]]>
|
|
</doc>
|
|
</method>
|
|
<method name="deserialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rin" type="org.apache.hadoop.record.RecordInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Deserialize a record without a tag
|
|
@param rin Record input source]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="din" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Abstract class that is extended by generated classes.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.Record -->
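<!-- Illustrative example (hand-written, not produced by JDiff): serializing an
     instance of a generated Record subclass. MyRecord below is hypothetical;
     any class emitted by the Hadoop record compiler (rcc) extends Record and
     can be used the same way.

     import java.io.ByteArrayOutputStream;
     import org.apache.hadoop.record.CsvRecordOutput;

     ByteArrayOutputStream bytes = new ByteArrayOutputStream();
     CsvRecordOutput rout = new CsvRecordOutput(bytes);  // any RecordOutput implementation works
     MyRecord rec = new MyRecord();
     rec.serialize(rout, "MyRecord");  // the tag matters only for tagged formats such as XML
-->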
|
|
<!-- start class org.apache.hadoop.record.RecordComparator -->
|
|
<class name="RecordComparator" extends="org.apache.hadoop.io.WritableComparator"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RecordComparator" type="java.lang.Class<? extends org.apache.hadoop.io.WritableComparable>"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a raw {@link Record} comparison implementation.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="compare" return="int"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<method name="define"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class"/>
|
|
<param name="comparator" type="org.apache.hadoop.record.RecordComparator"/>
|
|
<doc>
|
|
<![CDATA[Register an optimized comparator for a {@link Record} implementation.
|
|
|
|
@param c record class for which a raw comparator is provided
|
|
@param comparator Raw comparator instance for class c]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A raw record comparator base class]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.RecordComparator -->
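<!-- Illustrative sketch (hand-written, not produced by JDiff): registering a raw
     comparator with RecordComparator.define. MyRecord and MyRecordComparator are
     hypothetical names; generated records would typically register theirs once,
     e.g. from a static initializer.

     public class MyRecordComparator extends org.apache.hadoop.record.RecordComparator {
       public MyRecordComparator() { super(MyRecord.class); }
       public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
         // fall back to plain lexicographic byte order for this sketch
         return org.apache.hadoop.record.Utils.compareBytes(b1, s1, l1, b2, s2, l2);
       }
       static {
         org.apache.hadoop.record.RecordComparator.define(MyRecord.class, new MyRecordComparator());
       }
     }
-->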
|
|
<!-- start interface org.apache.hadoop.record.RecordInput -->
|
|
<interface name="RecordInput" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="readByte" return="byte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a byte from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readBool" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a boolean from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read an integer from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a long integer from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFloat" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a single-precision float from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readDouble" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a double-precision number from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a UTF-8 encoded string from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readBuffer" return="org.apache.hadoop.record.Buffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read byte array from serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return value read from serialized record.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check the mark for start of the serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check the mark for end of the serialized record.
|
|
@param tag Used by tagged serialization formats (such as XML)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startVector" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check the mark for start of the serialized vector.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return Index that is used to count the number of elements.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check the mark for end of the serialized vector.
|
|
@param tag Used by tagged serialization formats (such as XML)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startMap" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check the mark for start of the serialized map.
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@return Index that is used to count the number of map entries.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Check the mark for end of the serialized map.
|
|
@param tag Used by tagged serialization formats (such as XML)]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Interface that all the deserializers have to implement.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.record.RecordInput -->
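<!-- Illustrative sketch (hand-written, not produced by JDiff): the typical shape
     of a generated record's deserialize method in terms of RecordInput. The
     field names and types are hypothetical.

     public void deserialize(RecordInput rin, String tag) throws java.io.IOException {
       rin.startRecord(tag);
       name = rin.readString("name");
       count = rin.readInt("count");
       rin.endRecord(tag);
     }
-->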
|
|
<!-- start interface org.apache.hadoop.record.RecordOutput -->
|
|
<interface name="RecordOutput" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="writeByte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a byte to serialized record.
|
|
@param b Byte to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeBool"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="boolean"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a boolean to serialized record.
|
|
@param b Boolean to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeInt"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write an integer to serialized record.
|
|
@param i Integer to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeLong"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="l" type="long"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a long integer to serialized record.
|
|
@param l Long to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeFloat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="float"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a single-precision float to serialized record.
|
|
@param f Float to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeDouble"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="d" type="double"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a double precision floating point number to serialized record.
|
|
@param d Double to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeString"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="java.lang.String"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a unicode string to serialized record.
|
|
@param s String to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="org.apache.hadoop.record.Buffer"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Write a buffer to serialized record.
|
|
@param buf Buffer to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark the start of a record to be serialized.
|
|
@param r Record to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark the end of a serialized record.
|
|
@param r Record to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark the start of a vector to be serialized.
|
|
@param v Vector to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark the end of a serialized vector.
|
|
@param v Vector to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="m" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark the start of a map to be serialized.
|
|
@param m Map to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="m" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Mark the end of a serialized map.
|
|
@param m Map to be serialized
|
|
@param tag Used by tagged serialization formats (such as XML)
|
|
@throws IOException Indicates error in serialization]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Interface that all the serializers have to implement.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.record.RecordOutput -->
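<!-- Illustrative sketch (hand-written, not produced by JDiff): the typical shape
     of a generated record's serialize method in terms of RecordOutput, mirroring
     the deserialize sketch above. Field names are hypothetical.

     public void serialize(RecordOutput rout, String tag) throws java.io.IOException {
       rout.startRecord(this, tag);
       rout.writeString(name, "name");
       rout.writeInt(count, "count");
       rout.endRecord(this, tag);
     }
-->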
|
|
<!-- start class org.apache.hadoop.record.Utils -->
|
|
<class name="Utils" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="readFloat" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse a float from a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readDouble" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<doc>
|
|
<![CDATA[Parse a double from a byte array.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded long from a byte array and returns it.
|
|
@param bytes byte array containing the encoded long
|
|
@param start starting index
|
|
@throws java.io.IOException
|
|
@return deserialized long]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded integer from a byte array and returns it.
|
|
@param bytes byte array with the encoded integer
|
|
@param start start index
|
|
@throws java.io.IOException
|
|
@return deserialized integer]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded long from a stream and returns it.
|
|
@param in input stream
|
|
@throws java.io.IOException
|
|
@return deserialized long]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readVInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Reads a zero-compressed encoded integer from a stream and returns it.
|
|
@param in input stream
|
|
@throws java.io.IOException
|
|
@return deserialized integer]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getVIntSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="long"/>
|
|
<doc>
|
|
<![CDATA[Get the encoded length of an integer stored in the variable-length format
|
|
@return the encoded length]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeVLong"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.DataOutput"/>
|
|
<param name="i" type="long"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serializes a long to a binary stream with zero-compressed encoding.
|
|
For -112 <= i <= 127, only one byte is used with the actual value.
|
|
For other values of i, the first byte value indicates whether the
|
|
long is positive or negative, and the number of bytes that follow.
|
|
If the first byte value v is between -113 and -120, the following long
|
|
is positive, and the number of bytes that follow is -(v+112).
|
|
If the first byte value v is between -121 and -128, the following long
|
|
is negative, and the number of bytes that follow is -(v+120). Bytes are
|
|
stored in the high-non-zero-byte-first order.
|
|
|
|
@param stream Binary output stream
|
|
@param i Long to be serialized
|
|
@throws java.io.IOException]]>
|
|
</doc>
|
|
</method>
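<!-- Illustrative sketch (hand-written, not produced by JDiff) of the
     zero-compressed encoding described for writeVLong above; the shipped
     implementation is Utils.writeVLong, this is only a readable restatement.

     static void writeVLongSketch(java.io.DataOutput stream, long i) throws java.io.IOException {
       if (i >= -112 && i <= 127) {          // small values fit into a single byte
         stream.writeByte((byte) i);
         return;
       }
       int len = -112;                        // first-byte marker for positive values
       if (i < 0) {
         i ^= -1L;                            // one's complement, so the loops below see a positive value
         len = -120;                          // first-byte marker for negative values
       }
       for (long tmp = i; tmp != 0; tmp = tmp >> 8) {
         len = len - 1;                       // one step per payload byte
       }
       stream.writeByte((byte) len);
       len = (len < -120) ? -(len + 120) : -(len + 112);
       for (int idx = len; idx != 0; idx = idx - 1) {
         int shiftbits = (idx - 1) * 8;       // emit bytes in high-order-first order
         long mask = 0xFFL << shiftbits;
         stream.writeByte((byte) ((i & mask) >> shiftbits));
       }
     }
-->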
|
|
<method name="writeVInt"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.DataOutput"/>
|
|
<param name="i" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serializes an int to a binary stream with zero-compressed encoding.
|
|
|
|
@param stream Binary output stream
|
|
@param i int to be serialized
|
|
@throws java.io.IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareBytes" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
<doc>
|
|
<![CDATA[Lexicographic order of binary data.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="hexchars" type="char[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Various utility functions for the Hadoop record I/O runtime.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.Utils -->
|
|
<!-- start class org.apache.hadoop.record.XmlRecordInput -->
|
|
<class name="XmlRecordInput" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.RecordInput"/>
|
|
<constructor name="XmlRecordInput" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of XmlRecordInput]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="readByte" return="byte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readBool" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readInt" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readLong" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readFloat" return="float"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readDouble" return="double"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="readBuffer" return="org.apache.hadoop.record.Buffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startVector" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startMap" return="org.apache.hadoop.record.Index"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[XML Deserializer.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.XmlRecordInput -->
|
|
<!-- start class org.apache.hadoop.record.XmlRecordOutput -->
|
|
<class name="XmlRecordOutput" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.RecordOutput"/>
|
|
<constructor name="XmlRecordOutput" type="java.io.OutputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of XmlRecordOutput]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="writeByte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeBool"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="boolean"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeInt"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeLong"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="l" type="long"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeFloat"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="f" type="float"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeDouble"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="d" type="double"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeString"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="java.lang.String"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="writeBuffer"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="org.apache.hadoop.record.Buffer"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="r" type="org.apache.hadoop.record.Record"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.ArrayList"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="startMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="endMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="v" type="java.util.TreeMap"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[XML Serializer.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.XmlRecordOutput -->
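<!-- Illustrative sketch (hand-written, not produced by JDiff): an XML round trip
     using XmlRecordOutput and XmlRecordInput. MyRecord is hypothetical (any
     rcc-generated record class would do).

     import java.io.ByteArrayInputStream;
     import java.io.ByteArrayOutputStream;
     import org.apache.hadoop.record.XmlRecordInput;
     import org.apache.hadoop.record.XmlRecordOutput;

     ByteArrayOutputStream bytes = new ByteArrayOutputStream();
     XmlRecordOutput xout = new XmlRecordOutput(bytes);
     MyRecord rec = new MyRecord();
     rec.serialize(xout, "MyRecord");                     // write the record as XML

     XmlRecordInput xin = new XmlRecordInput(new ByteArrayInputStream(bytes.toByteArray()));
     MyRecord copy = new MyRecord();
     copy.deserialize(xin, "MyRecord");                   // read it back
-->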
|
|
</package>
|
|
<package name="org.apache.hadoop.record.compiler">
|
|
<!-- start class org.apache.hadoop.record.compiler.CodeBuffer -->
|
|
<class name="CodeBuffer" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A wrapper around StringBuffer that automatically does indentation]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.CodeBuffer -->
|
|
<!-- start class org.apache.hadoop.record.compiler.Consts -->
|
|
<class name="Consts" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<field name="RIO_PREFIX" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RTI_VAR" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RTI_FILTER" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RTI_FILTER_FIELDS" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RECORD_OUTPUT" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RECORD_INPUT" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="TAG" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Constant definitions for the Record I/O compiler.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.Consts -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JBoolean -->
|
|
<class name="JBoolean" extends="org.apache.hadoop.record.compiler.JType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JBoolean"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JBoolean]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JBoolean -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JBuffer -->
|
|
<class name="JBuffer" extends="org.apache.hadoop.record.compiler.JCompType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JBuffer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JBuffer]]>
|
|
</doc>
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Code generator for "buffer" type.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JBuffer -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JByte -->
|
|
<class name="JByte" extends="org.apache.hadoop.record.compiler.JType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JByte"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Code generator for "byte" type.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JByte -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JDouble -->
|
|
<class name="JDouble" extends="org.apache.hadoop.record.compiler.JType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JDouble"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JDouble]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JDouble -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JField -->
|
|
<class name="JField" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JField" type="java.lang.String, T"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JField]]>
|
|
</doc>
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[A thin wrapper around a record field.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JField -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JFile -->
|
|
<class name="JFile" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JFile" type="java.lang.String, java.util.ArrayList<org.apache.hadoop.record.compiler.JFile>, java.util.ArrayList<org.apache.hadoop.record.compiler.JRecord>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JFile
|
|
|
|
@param name possibly full pathname to the file
|
|
@param inclFiles included files (as JFile)
|
|
@param recList List of records defined within this file]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="genCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="language" type="java.lang.String"/>
|
|
<param name="destDir" type="java.lang.String"/>
|
|
<param name="options" type="java.util.ArrayList<java.lang.String>"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Generate record code in the given language. The language name should be all
|
|
lowercase.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Container for the Hadoop Record DDL.
|
|
The main components of the file are filename, list of included files,
|
|
and records defined in that file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JFile -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JFloat -->
|
|
<class name="JFloat" extends="org.apache.hadoop.record.compiler.JType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JFloat"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JFloat]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JFloat -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JInt -->
|
|
<class name="JInt" extends="org.apache.hadoop.record.compiler.JType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JInt"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JInt]]>
|
|
</doc>
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Code generator for "int" type]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JInt -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JLong -->
|
|
<class name="JLong" extends="org.apache.hadoop.record.compiler.JType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JLong"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JLong]]>
|
|
</doc>
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Code generator for "long" type]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JLong -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JMap -->
|
|
<class name="JMap" extends="org.apache.hadoop.record.compiler.JCompType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JMap" type="org.apache.hadoop.record.compiler.JType, org.apache.hadoop.record.compiler.JType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JMap]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JMap -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JRecord -->
|
|
<class name="JRecord" extends="org.apache.hadoop.record.compiler.JCompType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JRecord" type="java.lang.String, java.util.ArrayList<org.apache.hadoop.record.compiler.JField<org.apache.hadoop.record.compiler.JType>>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JRecord]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JRecord -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JString -->
|
|
<class name="JString" extends="org.apache.hadoop.record.compiler.JCompType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JString"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JString]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JString -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JType -->
|
|
<class name="JType" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[Abstract Base class for all types supported by Hadoop Record I/O.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JType -->
|
|
<!-- start class org.apache.hadoop.record.compiler.JVector -->
|
|
<class name="JVector" extends="org.apache.hadoop.record.compiler.JCompType"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="JVector" type="org.apache.hadoop.record.compiler.JType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of JVector]]>
|
|
</doc>
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.JVector -->
|
|
</package>
|
|
<package name="org.apache.hadoop.record.compiler.ant">
|
|
<!-- start class org.apache.hadoop.record.compiler.ant.RccTask -->
|
|
<class name="RccTask" extends="org.apache.tools.ant.Task"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RccTask"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new instance of RccTask]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="setLanguage"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="language" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Sets the output language option
|
|
@param language "java"/"c++"]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="file" type="java.io.File"/>
|
|
<doc>
|
|
<![CDATA[Sets the record definition file attribute
|
|
@param file record definition file]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setFailonerror"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="flag" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Given multiple files (via fileset), sets the error handling behavior
|
|
@param flag if true, throw a build exception in case of failure (the default)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setDestdir"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="java.io.File"/>
|
|
<doc>
|
|
<![CDATA[Sets directory where output files will be generated
|
|
@param dir output directory]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addFileset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="set" type="org.apache.tools.ant.types.FileSet"/>
|
|
<doc>
|
|
<![CDATA[Adds a fileset that can consist of one or more files
|
|
@param set Set of record definition files]]>
|
|
</doc>
|
|
</method>
|
|
<method name="execute"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="BuildException" type="org.apache.tools.ant.BuildException"/>
|
|
<doc>
|
|
<![CDATA[Invoke the Hadoop record compiler on each record definition file]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Hadoop record compiler ant Task
|
|
<p> This task takes the given record definition files and compiles them into
|
|
java or c++
|
|
files. It is then up to the user to compile the generated files.
|
|
|
|
<p> The task requires the <code>file</code> or the nested fileset element to be
|
|
specified. Optional attributes are <code>language</code> (set the output
|
|
language, default is "java"),
|
|
<code>destdir</code> (name of the destination directory for generated java/c++
|
|
code, default is ".") and <code>failonerror</code> (specifies error handling
|
|
behavior. default is true).
|
|
<p><h4>Usage</h4>
|
|
<pre>
|
|
<recordcc
|
|
destdir="${basedir}/gensrc"
|
|
language="java">
|
|
<fileset include="**\/*.jr" />
|
|
</recordcc>
|
|
</pre>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.ant.RccTask -->
|
|
</package>
|
|
<package name="org.apache.hadoop.record.compiler.generated">
|
|
<!-- start class org.apache.hadoop.record.compiler.generated.ParseException -->
|
|
<class name="ParseException" extends="java.lang.Exception"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ParseException" type="org.apache.hadoop.record.compiler.generated.Token, int[][], java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This constructor is used by the method "generateParseException"
|
|
in the generated parser. Calling this constructor generates
|
|
a new object of this type with the fields "currentToken",
|
|
"expectedTokenSequences", and "tokenImage" set. The boolean
|
|
flag "specialConstructor" is also set to true to indicate that
|
|
this constructor was used to create this object.
|
|
This constructor calls its super class with the empty string
|
|
to force the "toString" method of parent class "Throwable" to
|
|
print the error message in the form:
|
|
ParseException: <result of getMessage>]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ParseException"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The following constructors are for use by you for whatever
|
|
purpose you can think of. Constructing the exception in this
|
|
manner makes the exception behave in the normal way - i.e., as
|
|
documented in the class "Throwable". The fields "errorToken",
|
|
"expectedTokenSequences", and "tokenImage" do not contain
|
|
relevant information. The JavaCC generated code does not use
|
|
these constructors.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="ParseException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getMessage" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This method has the standard behavior when this object has been
|
|
created using the standard constructors. Otherwise, it uses
|
|
"currentToken" and "expectedTokenSequences" to generate a parse
|
|
error message and returns it. If this object has been created
|
|
due to a parse error, and you do not catch it (it gets thrown
|
|
from the parser), then this method is called during the printing
|
|
of the final stack trace, and hence the correct error message
|
|
gets displayed.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="add_escapes" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Used to convert raw characters to their escaped version
|
|
when these raw versions cannot be used as part of an ASCII
|
|
string literal.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="specialConstructor" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This variable determines which constructor was used to create
|
|
this object and thereby affects the semantics of the
|
|
"getMessage" method (see below).]]>
|
|
</doc>
|
|
</field>
|
|
<field name="currentToken" type="org.apache.hadoop.record.compiler.generated.Token"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This is the last token that has been consumed successfully. If
|
|
this object has been created due to a parse error, the token
|
|
following this token will (therefore) be the first error token.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="expectedTokenSequences" type="int[][]"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Each entry in this array is an array of integers. Each array
|
|
of integers represents a sequence of tokens (by their ordinal
|
|
values) that is expected at this point of the parse.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="tokenImage" type="java.lang.String[]"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This is a reference to the "tokenImage" array of the generated
|
|
parser within which the parse error occurred. This array is
|
|
defined in the generated ...Constants interface.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="eol" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The end of line string for this machine.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This exception is thrown when parse errors are encountered.
|
|
You can explicitly create objects of this exception type by
|
|
calling the method generateParseException in the generated
|
|
parser.
|
|
|
|
You can modify this class to customize your error reporting
|
|
mechanisms so long as you retain the public fields.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.generated.ParseException -->
|
|
<!-- start class org.apache.hadoop.record.compiler.generated.Rcc -->
|
|
<class name="Rcc" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.compiler.generated.RccConstants"/>
|
|
<constructor name="Rcc" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Rcc" type="java.io.InputStream, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Rcc" type="java.io.Reader"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Rcc" type="org.apache.hadoop.record.compiler.generated.RccTokenManager"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
</method>
|
|
<method name="usage"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="driver" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
</method>
|
|
<method name="Input" return="org.apache.hadoop.record.compiler.JFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="Include" return="org.apache.hadoop.record.compiler.JFile"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="Module" return="java.util.ArrayList<org.apache.hadoop.record.compiler.JRecord>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="ModuleName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="RecordList" return="java.util.ArrayList<org.apache.hadoop.record.compiler.JRecord>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="Record" return="org.apache.hadoop.record.compiler.JRecord"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="Field" return="org.apache.hadoop.record.compiler.JField<org.apache.hadoop.record.compiler.JType>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="Type" return="org.apache.hadoop.record.compiler.JType"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="Map" return="org.apache.hadoop.record.compiler.JMap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="Vector" return="org.apache.hadoop.record.compiler.JVector"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="ParseException" type="org.apache.hadoop.record.compiler.generated.ParseException"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.InputStream"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.InputStream"/>
|
|
<param name="encoding" type="java.lang.String"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.Reader"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="tm" type="org.apache.hadoop.record.compiler.generated.RccTokenManager"/>
|
|
</method>
|
|
<method name="getNextToken" return="org.apache.hadoop.record.compiler.generated.Token"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getToken" return="org.apache.hadoop.record.compiler.generated.Token"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="index" type="int"/>
|
|
</method>
|
|
<method name="generateParseException" return="org.apache.hadoop.record.compiler.generated.ParseException"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="enable_tracing"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="disable_tracing"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="token_source" type="org.apache.hadoop.record.compiler.generated.RccTokenManager"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="token" type="org.apache.hadoop.record.compiler.generated.Token"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="jj_nt" type="org.apache.hadoop.record.compiler.generated.Token"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.generated.Rcc -->
|
|
<!-- start interface org.apache.hadoop.record.compiler.generated.RccConstants -->
|
|
<interface name="RccConstants" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<field name="EOF" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="MODULE_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RECORD_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="INCLUDE_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="BYTE_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="BOOLEAN_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="INT_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LONG_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="FLOAT_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DOUBLE_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="USTRING_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="BUFFER_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="VECTOR_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="MAP_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LBRACE_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="RBRACE_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LT_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="GT_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="SEMICOLON_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="COMMA_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DOT_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="CSTRING_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="IDENT_TKN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DEFAULT" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="WithinOneLineComment" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="WithinMultiLineComment" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="tokenImage" type="java.lang.String[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.record.compiler.generated.RccConstants -->
|
|
<!-- start class org.apache.hadoop.record.compiler.generated.RccTokenManager -->
|
|
<class name="RccTokenManager" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.record.compiler.generated.RccConstants"/>
|
|
<constructor name="RccTokenManager" type="org.apache.hadoop.record.compiler.generated.SimpleCharStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="RccTokenManager" type="org.apache.hadoop.record.compiler.generated.SimpleCharStream, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setDebugStream"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ds" type="java.io.PrintStream"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="org.apache.hadoop.record.compiler.generated.SimpleCharStream"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="org.apache.hadoop.record.compiler.generated.SimpleCharStream"/>
|
|
<param name="lexState" type="int"/>
|
|
</method>
|
|
<method name="SwitchTo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="lexState" type="int"/>
|
|
</method>
|
|
<method name="jjFillToken" return="org.apache.hadoop.record.compiler.generated.Token"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getNextToken" return="org.apache.hadoop.record.compiler.generated.Token"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="debugStream" type="java.io.PrintStream"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="jjstrLiteralImages" type="java.lang.String[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="lexStateNames" type="java.lang.String[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="jjnewLexState" type="int[]"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="input_stream" type="org.apache.hadoop.record.compiler.generated.SimpleCharStream"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="curChar" type="char"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.generated.RccTokenManager -->
|
|
<!-- start class org.apache.hadoop.record.compiler.generated.SimpleCharStream -->
|
|
<class name="SimpleCharStream" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="SimpleCharStream" type="java.io.Reader, int, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.Reader, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.Reader"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.InputStream, java.lang.String, int, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="UnsupportedEncodingException" type="java.io.UnsupportedEncodingException"/>
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.InputStream, int, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.InputStream, java.lang.String, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="UnsupportedEncodingException" type="java.io.UnsupportedEncodingException"/>
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.InputStream, int, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.InputStream, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="UnsupportedEncodingException" type="java.io.UnsupportedEncodingException"/>
|
|
</constructor>
|
|
<constructor name="SimpleCharStream" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setTabSize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
</method>
|
|
<method name="getTabSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
</method>
|
|
<method name="ExpandBuff"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="wrapAround" type="boolean"/>
|
|
</method>
|
|
<method name="FillBuff"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="BeginToken" return="char"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="UpdateLineColumn"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="char"/>
|
|
</method>
|
|
<method name="readChar" return="char"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getEndColumn" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getEndLine" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getBeginColumn" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getBeginLine" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="backup"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="amount" type="int"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.Reader"/>
|
|
<param name="startline" type="int"/>
|
|
<param name="startcolumn" type="int"/>
|
|
<param name="buffersize" type="int"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.Reader"/>
|
|
<param name="startline" type="int"/>
|
|
<param name="startcolumn" type="int"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.Reader"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.InputStream"/>
|
|
<param name="encoding" type="java.lang.String"/>
|
|
<param name="startline" type="int"/>
|
|
<param name="startcolumn" type="int"/>
|
|
<param name="buffersize" type="int"/>
|
|
<exception name="UnsupportedEncodingException" type="java.io.UnsupportedEncodingException"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.InputStream"/>
|
|
<param name="startline" type="int"/>
|
|
<param name="startcolumn" type="int"/>
|
|
<param name="buffersize" type="int"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.InputStream"/>
|
|
<param name="encoding" type="java.lang.String"/>
|
|
<exception name="UnsupportedEncodingException" type="java.io.UnsupportedEncodingException"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.InputStream"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.InputStream"/>
|
|
<param name="encoding" type="java.lang.String"/>
|
|
<param name="startline" type="int"/>
|
|
<param name="startcolumn" type="int"/>
|
|
<exception name="UnsupportedEncodingException" type="java.io.UnsupportedEncodingException"/>
|
|
</method>
|
|
<method name="ReInit"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dstream" type="java.io.InputStream"/>
|
|
<param name="startline" type="int"/>
|
|
<param name="startcolumn" type="int"/>
|
|
</method>
|
|
<method name="GetImage" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="GetSuffix" return="char[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="Done"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="adjustBeginLineColumn"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="newLine" type="int"/>
|
|
<param name="newCol" type="int"/>
|
|
<doc>
|
|
<![CDATA[Method to adjust line and column numbers for the start of a token.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="staticFlag" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="bufpos" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="bufline" type="int[]"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="bufcolumn" type="int[]"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="column" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="line" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="prevCharIsCR" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="prevCharIsLF" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="inputStream" type="java.io.Reader"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="buffer" type="char[]"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="maxNextCharInd" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="inBuf" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="tabSize" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An implementation of interface CharStream, where the stream is assumed to
|
|
contain only ASCII characters (without unicode processing).]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.generated.SimpleCharStream -->
|
|
<!-- start class org.apache.hadoop.record.compiler.generated.Token -->
|
|
<class name="Token" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Token"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the image.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="newToken" return="org.apache.hadoop.record.compiler.generated.Token"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ofKind" type="int"/>
|
|
<doc>
|
|
<![CDATA[Returns a new Token object, by default. However, if you want, you
|
|
can create and return subclass objects based on the value of ofKind.
|
|
Simply add the cases to the switch for all those special cases.
|
|
For example, if you have a subclass of Token called IDToken that
|
|
you want to create if ofKind is ID, simply add something like:
|
|
|
|
case MyParserConstants.ID : return new IDToken();
|
|
|
|
to the following switch statement. Then you can cast the matchedToken
|
|
variable to the appropriate type and use it in your lexical actions.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="kind" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[An integer that describes the kind of this token. This numbering
|
|
system is determined by JavaCCParser, and a table of these numbers is
|
|
stored in the file ...Constants.java.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="beginLine" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[beginLine and beginColumn describe the position of the first character
|
|
of this token; endLine and endColumn describe the position of the
|
|
last character of this token.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="beginColumn" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[beginLine and beginColumn describe the position of the first character
|
|
of this token; endLine and endColumn describe the position of the
|
|
last character of this token.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="endLine" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[beginLine and beginColumn describe the position of the first character
|
|
of this token; endLine and endColumn describe the position of the
|
|
last character of this token.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="endColumn" type="int"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[beginLine and beginColumn describe the position of the first character
|
|
of this token; endLine and endColumn describe the position of the
|
|
last character of this token.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="image" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[The string image of the token.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="next" type="org.apache.hadoop.record.compiler.generated.Token"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[A reference to the next regular (non-special) token from the input
|
|
stream. If this is the last token from the input stream, or if the
|
|
token manager has not read tokens beyond this one, this field is
|
|
set to null. This is true only if this token is also a regular
|
|
token. Otherwise, see below for a description of the contents of
|
|
this field.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="specialToken" type="org.apache.hadoop.record.compiler.generated.Token"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[This field is used to access special tokens that occur prior to this
|
|
token, but after the immediately preceding regular (non-special) token.
|
|
If there are no such special tokens, this field is set to null.
|
|
When there is more than one such special token, this field refers
|
|
to the last of these special tokens, which in turn refers to the next
|
|
previous special token through its specialToken field, and so on
|
|
until the first special token (whose specialToken field is null).
|
|
The next fields of special tokens refer to other special tokens that
|
|
immediately follow it (without an intervening regular token). If there
|
|
is no such token, this field is null.]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Describes the input token stream.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.generated.Token -->
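<!-- Illustrative sketch, not part of the JDiff-generated output: one possible way, inferred from
     the specialToken and next field descriptions above, to visit the special tokens (for example
     comments) that immediately precede a regular Token. The traversal itself is an assumption
     drawn from that description; only fields listed in this API report are used.

     import org.apache.hadoop.record.compiler.generated.Token;

     final class SpecialTokenWalker {
       /** Print the images of all special tokens immediately preceding t, oldest first. */
       static void printLeadingSpecials(Token t) {
         Token s = t.specialToken;
         if (s == null) {
           return;                          // no special tokens before this regular token
         }
         // Walk back to the first special token in the chain (its specialToken is null).
         while (s.specialToken != null) {
           s = s.specialToken;
         }
         // Walk forward via next, which links special tokens that follow one another; the
         // last special token before the regular token has next == null.
         for (; s != null; s = s.next) {
           System.out.println(s.image);
         }
       }
     }
-->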
|
|
<!-- start class org.apache.hadoop.record.compiler.generated.TokenMgrError -->
|
|
<class name="TokenMgrError" extends="java.lang.Error"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TokenMgrError"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="TokenMgrError" type="java.lang.String, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="TokenMgrError" type="boolean, int, int, int, java.lang.String, char, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="addEscapes" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Replaces unprintable characters by their escaped (or unicode escaped)
|
|
equivalents in the given string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="LexicalError" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="EOFSeen" type="boolean"/>
|
|
<param name="lexState" type="int"/>
|
|
<param name="errorLine" type="int"/>
|
|
<param name="errorColumn" type="int"/>
|
|
<param name="errorAfter" type="java.lang.String"/>
|
|
<param name="curChar" type="char"/>
|
|
<doc>
|
|
<![CDATA[Returns a detailed message for the Error when it is thrown by the
|
|
token manager to indicate a lexical error.
|
|
Parameters :
|
|
EOFSeen : indicates if EOF caused the lexical error
curLexState : lexical state in which this error occurred
errorLine : line number where the error occurred
errorColumn : column number where the error occurred
errorAfter : prefix that was seen before this error occurred
curChar : the offending character
|
|
Note: You can customize the lexical error message by modifying this method.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getMessage" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[You can also modify the body of this method to customize your error messages.
|
|
For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
|
|
of end-user concern, so you can return something like:
|
|
|
|
"Internal Error : Please file a bug report .... "
|
|
|
|
from this method for such cases in the release version of your parser.]]>
|
|
</doc>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.compiler.generated.TokenMgrError -->
|
|
</package>
|
|
<package name="org.apache.hadoop.record.meta">
|
|
<!-- start class org.apache.hadoop.record.meta.FieldTypeInfo -->
|
|
<class name="FieldTypeInfo" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getTypeID" return="org.apache.hadoop.record.meta.TypeID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[get the field's TypeID object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFieldID" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[get the field's id (name)]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Two FieldTypeInfos are equal if each of their fields matches]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[We use a basic hashcode implementation, since this class will likely not
|
|
be used as a hashmap key]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ti" type="org.apache.hadoop.record.meta.FieldTypeInfo"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Represents a type information for a field, which is made up of its
|
|
ID (name) and its type (a TypeID object).]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.FieldTypeInfo -->
|
|
<!-- start class org.apache.hadoop.record.meta.MapTypeID -->
|
|
<class name="MapTypeID" extends="org.apache.hadoop.record.meta.TypeID"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MapTypeID" type="org.apache.hadoop.record.meta.TypeID, org.apache.hadoop.record.meta.TypeID"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getKeyTypeID" return="org.apache.hadoop.record.meta.TypeID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[get the TypeID of the map's key element]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getValueTypeID" return="org.apache.hadoop.record.meta.TypeID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[get the TypeID of the map's value element]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Two map typeIDs are equal if their constituent elements have the
|
|
same type]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[We use a basic hashcode implementation, since this class will likely not
|
|
be used as a hashmap key]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Represents typeID for a Map]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.MapTypeID -->
|
|
<!-- start class org.apache.hadoop.record.meta.RecordTypeInfo -->
|
|
<class name="RecordTypeInfo" extends="org.apache.hadoop.record.Record"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RecordTypeInfo"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create an empty RecordTypeInfo object.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="RecordTypeInfo" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a RecordTypeInfo object representing a record with the given name
|
|
@param name Name of the record]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[return the name of the record]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setName"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[set the name of the record]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addField"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fieldName" type="java.lang.String"/>
|
|
<param name="tid" type="org.apache.hadoop.record.meta.TypeID"/>
|
|
<doc>
|
|
<![CDATA[Add a field.
|
|
@param fieldName Name of the field
|
|
@param tid Type ID of the field]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFieldTypeInfos" return="java.util.Collection<org.apache.hadoop.record.meta.FieldTypeInfo>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return a collection of field type infos]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getNestedStructTypeInfo" return="org.apache.hadoop.record.meta.RecordTypeInfo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Return the type info of a nested record. We only consider nesting
|
|
to one level.
|
|
@param name Name of the nested record]]>
|
|
</doc>
|
|
</method>
|
|
<method name="serialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rout" type="org.apache.hadoop.record.RecordOutput"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serialize the type information for a record]]>
|
|
</doc>
|
|
</method>
|
|
<method name="deserialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rin" type="org.apache.hadoop.record.RecordInput"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Deserialize the type information for a record]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compareTo" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="peer_" type="java.lang.Object"/>
|
|
<exception name="ClassCastException" type="java.lang.ClassCastException"/>
|
|
<doc>
|
|
<![CDATA[This class doesn't implement Comparable as it's not meant to be used
|
|
for anything besides de/serializing.
|
|
So we always throw an exception.
|
|
Not implemented. Always returns 0 if another RecordTypeInfo is passed in.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A record's Type Information object which can read/write itself.
|
|
|
|
Type information for a record comprises metadata about the record,
|
|
as well as a collection of type information for each field in the record.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.RecordTypeInfo -->
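<!-- Illustrative sketch, not part of the JDiff-generated output: a minimal example, with a
     hypothetical record name "Employee" and hypothetical field names, of how the RecordTypeInfo,
     FieldTypeInfo, and TypeID APIs documented above might be used together. Only members listed
     in this API report are used.

     import org.apache.hadoop.record.meta.FieldTypeInfo;
     import org.apache.hadoop.record.meta.RecordTypeInfo;
     import org.apache.hadoop.record.meta.TypeID;

     public class RecordTypeInfoDemo {
       public static void main(String[] args) {
         // Describe a record named "Employee" with a string name and an int id.
         RecordTypeInfo rti = new RecordTypeInfo("Employee");
         rti.addField("name", TypeID.StringTypeID);   // shared constant for the string type
         rti.addField("id", TypeID.IntTypeID);        // shared constant for the int type

         // Inspect the collected field type information.
         for (FieldTypeInfo fti : rti.getFieldTypeInfos()) {
           System.out.println(fti.getFieldID() + " : " + fti.getTypeID().getTypeVal());
         }
       }
     }
-->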
|
|
<!-- start class org.apache.hadoop.record.meta.StructTypeID -->
|
|
<class name="StructTypeID" extends="org.apache.hadoop.record.meta.TypeID"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="StructTypeID" type="org.apache.hadoop.record.meta.RecordTypeInfo"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a StructTypeID based on the RecordTypeInfo of some record]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getFieldTypeInfos" return="java.util.Collection<org.apache.hadoop.record.meta.FieldTypeInfo>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Represents typeID for a struct]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.StructTypeID -->
|
|
<!-- start class org.apache.hadoop.record.meta.TypeID -->
|
|
<class name="TypeID" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="getTypeVal" return="byte"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the type value. One of the constants in RIOType.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Two base typeIDs are equal if they refer to the same type]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[We use a basic hashcode implementation, since this class will likely not
|
|
be used as a hashmap key]]>
|
|
</doc>
|
|
</method>
|
|
<field name="BoolTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constant classes for the basic types, so we can share them.]]>
|
|
</doc>
|
|
</field>
|
|
<field name="BufferTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="ByteTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DoubleTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="FloatTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="IntTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LongTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="StringTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="typeVal" type="byte"
|
|
transient="false" volatile="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[Represents typeID for basic types.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.TypeID -->
|
|
<!-- start class org.apache.hadoop.record.meta.TypeID.RIOType -->
|
|
<class name="TypeID.RIOType" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="TypeID.RIOType"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<field name="BOOL" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="BUFFER" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="BYTE" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="DOUBLE" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="FLOAT" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="INT" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="LONG" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="MAP" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="STRING" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="STRUCT" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="VECTOR" type="byte"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[constants representing the IDL types we support]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.TypeID.RIOType -->
|
|
<!-- start class org.apache.hadoop.record.meta.Utils -->
|
|
<class name="Utils" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="skip"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="rin" type="org.apache.hadoop.record.RecordInput"/>
|
|
<param name="tag" type="java.lang.String"/>
|
|
<param name="typeID" type="org.apache.hadoop.record.meta.TypeID"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[read/skip bytes from stream based on a type]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Various utility functions for the Hadoop record I/O platform.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.Utils -->
|
|
<!-- start class org.apache.hadoop.record.meta.VectorTypeID -->
|
|
<class name="VectorTypeID" extends="org.apache.hadoop.record.meta.TypeID"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="VectorTypeID" type="org.apache.hadoop.record.meta.TypeID"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getElementTypeID" return="org.apache.hadoop.record.meta.TypeID"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Two vector typeIDs are equal if their constituent elements have the
|
|
same type]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[We use a basic hashcode implementation, since this class will likely not
|
|
be used as a hashmap key]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Represents typeID for vector.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.record.meta.VectorTypeID -->
|
|
</package>
|
|
<package name="org.apache.hadoop.security">
|
|
<!-- start class org.apache.hadoop.security.AccessControlException -->
|
|
<class name="AccessControlException" extends="org.apache.hadoop.fs.permission.AccessControlException"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="AccessControlException"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default constructor is needed for unwrapping from
|
|
{@link org.apache.hadoop.ipc.RemoteException}.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="AccessControlException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructs an {@link AccessControlException}
|
|
with the specified detail message.
|
|
@param s the detail message.]]>
|
|
</doc>
|
|
</constructor>
|
|
<doc>
|
|
<![CDATA[An exception class for access control related issues.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.security.AccessControlException -->
|
|
<!-- start class org.apache.hadoop.security.UnixUserGroupInformation -->
|
|
<class name="UnixUserGroupInformation" extends="org.apache.hadoop.security.UserGroupInformation"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="UnixUserGroupInformation"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Default constructor]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="UnixUserGroupInformation" type="java.lang.String, java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor with parameters user name and its group names.
|
|
The first entry in the groups list is the default group.
|
|
|
|
@param userName a user's name
|
|
@param groupNames groups list, first of which is the default group
|
|
@exception IllegalArgumentException if any argument is null]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="UnixUserGroupInformation" type="java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Constructor with parameter user/group names
|
|
|
|
@param ugi an array containing user/group names, the first
|
|
element of which is the user name, the second of
|
|
which is the default group name.
|
|
@exception IllegalArgumentException if the array size is less than 2
|
|
or any element is null.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="createImmutable" return="org.apache.hadoop.security.UnixUserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ugi" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Create an immutable {@link UnixUserGroupInformation} object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getGroupNames" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return an array of group names]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUserName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return the user's name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFields"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Deserialize this object
|
|
First check if this is a UGI in the string format.
|
|
If not, throw an IOException; otherwise
|
|
set this object's fields by reading them from the given data input
|
|
|
|
@param in input stream
|
|
@exception IOException is thrown if encounter any error when reading]]>
|
|
</doc>
|
|
</method>
|
|
<method name="write"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutput"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Serialize this object
|
|
First write a string marking that this is a UGI in the string format,
|
|
then write this object's serialized form to the given data output
|
|
|
|
@param out output stream
|
|
@exception IOException if encounter any error during writing]]>
|
|
</doc>
|
|
</method>
|
|
<method name="saveToConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="attr" type="java.lang.String"/>
|
|
<param name="ugi" type="org.apache.hadoop.security.UnixUserGroupInformation"/>
|
|
<doc>
|
|
<![CDATA[Store the given <code>ugi</code> as a comma separated string in
|
|
<code>conf</code> as a property <code>attr</code>
|
|
|
|
The String starts with the user name, followed by the default group name
and then the other group names.
|
|
|
|
@param conf configuration
|
|
@param attr property name
|
|
@param ugi a UnixUserGroupInformation]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFromConf" return="org.apache.hadoop.security.UnixUserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="attr" type="java.lang.String"/>
|
|
<exception name="LoginException" type="javax.security.auth.login.LoginException"/>
|
|
<doc>
|
|
<![CDATA[Read a UGI from the given <code>conf</code>
|
|
|
|
The object is expected to be stored under the property name <code>attr</code>
|
|
as a comma separated string that starts
|
|
with the user name followed by group names.
|
|
If the property name is not defined, return null.
|
|
It's assumed that there is only one UGI per user. If this user already
|
|
has a UGI in the ugi map, return the ugi in the map.
|
|
Otherwise, construct a UGI from the configuration, store it in the
|
|
ugi map and return it.
|
|
|
|
@param conf configuration
|
|
@param attr property name
|
|
@return a UnixUGI
|
|
@throws LoginException if the stored string is ill-formatted.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="login" return="org.apache.hadoop.security.UnixUserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="LoginException" type="javax.security.auth.login.LoginException"/>
|
|
<doc>
|
|
<![CDATA[Get current user's name and the names of all its groups from Unix.
|
|
It's assumed that there is only one UGI per user. If this user already
|
|
has a UGI in the ugi map, return the ugi in the map.
|
|
Otherwise get the current user's information from Unix, store it
|
|
in the map, and return it.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="login" return="org.apache.hadoop.security.UnixUserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="LoginException" type="javax.security.auth.login.LoginException"/>
|
|
<doc>
|
|
<![CDATA[Equivalent to login(conf, false).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="login" return="org.apache.hadoop.security.UnixUserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="save" type="boolean"/>
|
|
<exception name="LoginException" type="javax.security.auth.login.LoginException"/>
|
|
<doc>
|
|
<![CDATA[Get a user's name and its group names from the given configuration;
|
|
If it is not defined in the configuration, get the current user's
|
|
information from Unix.
|
|
If the user has a UGI in the ugi map, return the one in
|
|
the UGI map.
|
|
|
|
@param conf either a job configuration or client's configuration
|
|
@param save whether to save the user/group information back to conf
|
|
@return UnixUserGroupInformation a user/group information
|
|
@exception LoginException if not able to get the user/group information]]>
|
|
</doc>
|
|
</method>
|
|
<method name="equals" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="other" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Decide if two UGIs are the same
|
|
|
|
@param other other object
|
|
@return true if they are the same; false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hashCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a hash code for this UGI.
|
|
The hash code for a UGI is the hash code of its user name string.
|
|
|
|
@return a hash code value for this UGI.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Convert this object to a string
|
|
|
|
@return a comma separated string containing the user name and group names]]>
|
|
</doc>
|
|
</method>
|
|
<field name="UGI_PROPERTY_NAME" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An implementation of UserGroupInformation in the Unix system]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.security.UnixUserGroupInformation -->
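<!-- Illustrative example (not part of the JDiff data): a minimal sketch of the
     saveToConf/readFromConf round trip described above, assuming the 0.19 API exactly
     as listed; the user name, group names and use of UGI_PROPERTY_NAME are
     illustrative choices, not prescribed values.

     import javax.security.auth.login.LoginException;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.security.UnixUserGroupInformation;

     static UnixUserGroupInformation roundTrip() throws LoginException {
       Configuration conf = new Configuration();
       // The first entry in the group array is the default group.
       UnixUserGroupInformation ugi =
           new UnixUserGroupInformation("alice", new String[] {"users", "staff"});

       // Store the UGI as a comma separated string under the given property name.
       UnixUserGroupInformation.saveToConf(
           conf, UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);

       // Read it back; returns null if the property is not defined and
       // throws LoginException if the stored string is ill-formatted.
       return UnixUserGroupInformation.readFromConf(
           conf, UnixUserGroupInformation.UGI_PROPERTY_NAME);
     }
-->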
|
|
<!-- start class org.apache.hadoop.security.UserGroupInformation -->
|
|
<class name="UserGroupInformation" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.io.Writable"/>
|
|
<constructor name="UserGroupInformation"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getCurrentUGI" return="org.apache.hadoop.security.UserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@return the {@link UserGroupInformation} for the current thread]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setCurrentUGI"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="ugi" type="org.apache.hadoop.security.UserGroupInformation"/>
|
|
<doc>
|
|
<![CDATA[Set the {@link UserGroupInformation} for the current thread]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUserName" return="java.lang.String"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get username
|
|
|
|
@return the user's name]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getGroupNames" return="java.lang.String[]"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the names of the groups that the user belongs to
|
|
|
|
@return an array of group names]]>
|
|
</doc>
|
|
</method>
|
|
<method name="login" return="org.apache.hadoop.security.UserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="LoginException" type="javax.security.auth.login.LoginException"/>
|
|
<doc>
|
|
<![CDATA[Login and return a UserGroupInformation object.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readFrom" return="org.apache.hadoop.security.UserGroupInformation"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read a {@link UserGroupInformation} from conf]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A {@link Writable} abstract class for storing user and groups information.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.security.UserGroupInformation -->
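<!-- Illustrative example (not part of the JDiff data): a short sketch of how the
     login/setCurrentUGI/getCurrentUGI methods listed above fit together, assuming the
     0.19 API as described.

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.security.UserGroupInformation;

     Configuration conf = new Configuration();
     UserGroupInformation ugi = UserGroupInformation.login(conf);  // may throw LoginException
     UserGroupInformation.setCurrentUGI(ugi);                      // bind to the current thread
     UserGroupInformation current = UserGroupInformation.getCurrentUGI();
-->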
|
|
</package>
|
|
<package name="org.apache.hadoop.tools">
|
|
<!-- start class org.apache.hadoop.tools.DistCp -->
|
|
<class name="DistCp" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="DistCp" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="copy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="srcPath" type="java.lang.String"/>
|
|
<param name="destPath" type="java.lang.String"/>
|
|
<param name="logPath" type="org.apache.hadoop.fs.Path"/>
|
|
<param name="srcAsList" type="boolean"/>
|
|
<param name="ignoreReadFailures" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[This is the main driver for recursively copying directories
|
|
across file systems. It takes at least two command-line parameters: a source
URL and a destination URL. It then essentially does an "ls -lR" on the
|
|
source URL, and writes the output in a round-robin manner to all the map
|
|
input files. The mapper actually copies the files allotted to it. The
|
|
reduce is empty.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
</method>
|
|
<method name="getRandomId" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A Map-reduce program to recursively copy directories between
|
|
different file-systems.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.tools.DistCp -->
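<!-- Illustrative example (not part of the JDiff data): DistCp implements Tool, so it can be
     driven through ToolRunner as sketched below; the source and destination URIs are
     placeholders.

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.tools.DistCp;
     import org.apache.hadoop.util.ToolRunner;

     Configuration conf = new Configuration();
     // ToolRunner.run parses generic options, then hands the rest to DistCp.run; may throw Exception.
     int exitCode = ToolRunner.run(new DistCp(conf),
         new String[] { "hdfs://nn1:8020/src", "hdfs://nn2:8020/dest" });
-->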
|
|
<!-- start class org.apache.hadoop.tools.DistCp.DuplicationException -->
|
|
<class name="DistCp.DuplicationException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<field name="ERROR_CODE" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Error code for this exception]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[An exception class for duplicated source files.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.tools.DistCp.DuplicationException -->
|
|
<!-- start class org.apache.hadoop.tools.HadoopArchives -->
|
|
<class name="HadoopArchives" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.Tool"/>
|
|
<constructor name="HadoopArchives" type="org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="archive"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="srcPaths" type="java.util.List<org.apache.hadoop.fs.Path>"/>
|
|
<param name="archiveName" type="java.lang.String"/>
|
|
<param name="dest" type="org.apache.hadoop.fs.Path"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Archive the given source paths into
the destination directory.
|
|
@param srcPaths the src paths to be archived
|
|
@param dest the dest dir that will contain the archive]]>
|
|
</doc>
|
|
</method>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[The main driver for creating the archives.
It takes at least two command-line parameters: the source paths and the
destination. It does an lsr on the source paths.
The mapper creates the archives and the reducer creates
the archive index.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[The main function.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An archive creation utility.
This class provides methods that can be used
to create hadoop archives. For an understanding of
Hadoop archives, look at {@link HarFileSystem}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.tools.HadoopArchives -->
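<!-- Illustrative example (not part of the JDiff data): a sketch of the archive(...) method
     described above; the source paths, archive name and destination are placeholders.

     import java.util.Arrays;
     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.fs.Path;
     import org.apache.hadoop.tools.HadoopArchives;

     HadoopArchives har = new HadoopArchives(new Configuration());
     // Archives the two source directories into foo.har under /archives; may throw IOException.
     har.archive(Arrays.asList(new Path("/logs/2009/01"), new Path("/logs/2009/02")),
                 "foo.har", new Path("/archives"));
-->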
|
|
<!-- start class org.apache.hadoop.tools.Logalyzer -->
|
|
<class name="Logalyzer" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Logalyzer"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="doArchive"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="logListURI" type="java.lang.String"/>
|
|
<param name="archiveDirectory" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[doArchive: Workhorse function to archive log-files.
|
|
@param logListURI : The uri which will serve list of log-files to archive.
|
|
@param archiveDirectory : The directory to store archived logfiles.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="doAnalyze"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="inputFilesDirectory" type="java.lang.String"/>
|
|
<param name="outputDirectory" type="java.lang.String"/>
|
|
<param name="grepPattern" type="java.lang.String"/>
|
|
<param name="sortColumns" type="java.lang.String"/>
|
|
<param name="columnSeparator" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[doAnalyze:
|
|
@param inputFilesDirectory : Directory containing the files to be analyzed.
|
|
@param outputDirectory : Directory to store analysis (output).
|
|
@param grepPattern : Pattern to *grep* for.
|
|
@param sortColumns : Sort specification for output.
|
|
@param columnSeparator : Column separator.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Logalyzer: A utility tool for archiving and analyzing hadoop logs.
|
|
<p>
|
|
This tool supports archiving and analyzing (sort/grep) of log-files.
It takes as input
a) an input URI which will serve the URIs of the logs to be archived,
b) an output directory (not mandatory),
c) a directory on DFS to archive the logs, and
d) the sort/grep patterns for analyzing the files and the separator for boundaries.
|
|
Usage:
|
|
Logalyzer -archive -archiveDir <directory to archive logs> -analysis <directory> -logs <log-list uri> -grep <pattern> -sort <col1, col2> -separator <separator>
|
|
<p>]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.tools.Logalyzer -->
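<!-- Illustrative example (not part of the JDiff data): a sketch of the doAnalyze(...)
     parameters documented above; every argument value here is a placeholder and the
     sort-column and separator formats are assumptions.

     import org.apache.hadoop.tools.Logalyzer;

     Logalyzer analyzer = new Logalyzer();
     // Grep the archived logs for "Exception", sort on column 0, columns separated by spaces;
     // may throw IOException.
     analyzer.doAnalyze("/logs/archived", "/logs/analysis", "Exception", "0", " ");
-->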
|
|
<!-- start class org.apache.hadoop.tools.Logalyzer.LogComparator -->
|
|
<class name="Logalyzer.LogComparator" extends="org.apache.hadoop.io.Text.Comparator"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<constructor name="Logalyzer.LogComparator"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
</method>
|
|
<method name="getConf" return="org.apache.hadoop.conf.Configuration"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b1" type="byte[]"/>
|
|
<param name="s1" type="int"/>
|
|
<param name="l1" type="int"/>
|
|
<param name="b2" type="byte[]"/>
|
|
<param name="s2" type="int"/>
|
|
<param name="l2" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A WritableComparator optimized for UTF8 keys of the logs.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.tools.Logalyzer.LogComparator -->
|
|
<!-- start class org.apache.hadoop.tools.Logalyzer.LogRegexMapper -->
|
|
<class name="Logalyzer.LogRegexMapper" extends="org.apache.hadoop.mapred.MapReduceBase"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.mapred.Mapper<K, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, org.apache.hadoop.io.LongWritable>"/>
|
|
<constructor name="Logalyzer.LogRegexMapper"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="configure"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="job" type="org.apache.hadoop.mapred.JobConf"/>
|
|
</method>
|
|
<method name="map"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="key" type="K extends org.apache.hadoop.io.WritableComparable"/>
|
|
<param name="value" type="org.apache.hadoop.io.Text"/>
|
|
<param name="output" type="org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Text, org.apache.hadoop.io.LongWritable>"/>
|
|
<param name="reporter" type="org.apache.hadoop.mapred.Reporter"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A {@link Mapper} that extracts text matching a regular expression.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.tools.Logalyzer.LogRegexMapper -->
|
|
</package>
|
|
<package name="org.apache.hadoop.util">
|
|
<!-- start class org.apache.hadoop.util.CyclicIteration -->
|
|
<class name="CyclicIteration" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.lang.Iterable<java.util.Map.Entry<K, V>>"/>
|
|
<constructor name="CyclicIteration" type="java.util.NavigableMap<K, V>, K"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct an {@link Iterable} object,
|
|
so that an {@link Iterator} can be created
|
|
for iterating the given {@link NavigableMap}.
|
|
The iteration begins from the starting key exclusively.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="iterator" return="java.util.Iterator<java.util.Map.Entry<K, V>>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Provide a cyclic {@link Iterator} for a {@link NavigableMap}.
|
|
The {@link Iterator} navigates the entries of the map
|
|
according to the map's ordering.
|
|
If the {@link Iterator} hits the last entry of the map,
|
|
it will then continue from the first entry.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.CyclicIteration -->
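<!-- Illustrative example (not part of the JDiff data): a sketch of cyclic iteration over a
     NavigableMap using the constructor listed above; the map contents are made up.

     import java.util.Map;
     import java.util.TreeMap;
     import org.apache.hadoop.util.CyclicIteration;

     TreeMap<Integer, String> map = new TreeMap<Integer, String>();
     map.put(1, "a"); map.put(3, "b"); map.put(5, "c");

     // Iteration begins after the starting key 3 (exclusive) and wraps around: 5, 1, 3.
     for (Map.Entry<Integer, String> e : new CyclicIteration<Integer, String>(map, 3)) {
       System.out.println(e.getKey() + "=" + e.getValue());
     }
-->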
|
|
<!-- start class org.apache.hadoop.util.Daemon -->
|
|
<class name="Daemon" extends="java.lang.Thread"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Daemon"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a daemon thread.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Daemon" type="java.lang.Runnable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a daemon thread.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="Daemon" type="java.lang.ThreadGroup, java.lang.Runnable"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Construct a daemon thread to be part of a specified thread group.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getRunnable" return="java.lang.Runnable"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A thread that has called {@link Thread#setDaemon(boolean) } with true.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.Daemon -->
|
|
<!-- start class org.apache.hadoop.util.DataChecksum -->
|
|
<class name="DataChecksum" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="java.util.zip.Checksum"/>
|
|
<method name="newDataChecksum" return="org.apache.hadoop.util.DataChecksum"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="type" type="int"/>
|
|
<param name="bytesPerChecksum" type="int"/>
|
|
</method>
|
|
<method name="newDataChecksum" return="org.apache.hadoop.util.DataChecksum"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<doc>
|
|
<![CDATA[Creates a DataChecksum from HEADER_LEN bytes starting at bytes[offset].
|
|
@return DataChecksum of the type in the array or null in case of an error.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="newDataChecksum" return="org.apache.hadoop.util.DataChecksum"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="in" type="java.io.DataInputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[This constructs a DataChecksum by reading HEADER_LEN bytes from
|
|
input stream <i>in</i>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeHeader"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutputStream"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes the checksum header to the output stream <i>out</i>.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHeader" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="writeValue" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.DataOutputStream"/>
|
|
<param name="reset" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes the current checksum to the stream.
|
|
If <i>reset</i> is true, then resets the checksum.
|
|
@return number of bytes written. Will be equal to getChecksumSize();]]>
|
|
</doc>
|
|
</method>
|
|
<method name="writeValue" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<param name="reset" type="boolean"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Writes the current checksum to a buffer.
|
|
If <i>reset</i> is true, then resets the checksum.
|
|
@return number of bytes written. Will be equal to getChecksumSize();]]>
|
|
</doc>
|
|
</method>
|
|
<method name="compare" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="buf" type="byte[]"/>
|
|
<param name="offset" type="int"/>
|
|
<doc>
|
|
<![CDATA[Compares the checksum located at buf[offset] with the current checksum.
|
|
@return true if the checksum matches and false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getChecksumType" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getChecksumSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getBytesPerChecksum" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getNumBytesInSum" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getChecksumHeaderSize" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getValue" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="reset"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="update"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="byte[]"/>
|
|
<param name="off" type="int"/>
|
|
<param name="len" type="int"/>
|
|
</method>
|
|
<method name="update"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="b" type="int"/>
|
|
</method>
|
|
<field name="HEADER_LEN" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="CHECKSUM_NULL" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="CHECKSUM_CRC32" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="SIZE_OF_INTEGER" type="int"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[This class provides an interface and utilities for processing checksums for
|
|
DFS data transfers.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.DataChecksum -->
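<!-- Illustrative example (not part of the JDiff data): a sketch of writing a checksum header
     and value with the methods listed above, assuming CHECKSUM_CRC32 and a 512-byte chunk
     size; the data and output stream are placeholders.

     import java.io.ByteArrayOutputStream;
     import java.io.DataOutputStream;
     import org.apache.hadoop.util.DataChecksum;

     byte[] chunk = new byte[512];   // data to checksum (placeholder contents)
     DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());

     DataChecksum sum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512);
     sum.update(chunk, 0, chunk.length);
     sum.writeHeader(out);                       // header holds the checksum type and bytes per checksum
     int written = sum.writeValue(out, true);    // equals getChecksumSize(); true resets the checksum
-->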
|
|
<!-- start class org.apache.hadoop.util.DiskChecker -->
|
|
<class name="DiskChecker" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DiskChecker"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="mkdirsWithExistsCheck" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="java.io.File"/>
|
|
<doc>
|
|
<![CDATA[The semantics of the mkdirsWithExistsCheck method differ from the mkdirs
method provided in Sun's java.io.File class in the following way:
|
|
While creating the non-existent parent directories, this method checks for
|
|
the existence of those directories if the mkdir fails at any point (since
|
|
that directory might have just been created by some other process).
|
|
If both mkdir() and the exists() check fail for any seemingly
|
|
non-existent directory, then we signal an error; Sun's mkdir would signal
|
|
an error (return false) if a directory it is attempting to create already
|
|
exists or the mkdir fails.
|
|
@param dir
|
|
@return true on success, false on failure]]>
|
|
</doc>
|
|
</method>
|
|
<method name="checkDir"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="java.io.File"/>
|
|
<exception name="DiskChecker.DiskErrorException" type="org.apache.hadoop.util.DiskChecker.DiskErrorException"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Class that provides utility functions for checking disk problems]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.DiskChecker -->
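<!-- Illustrative example (not part of the JDiff data): a sketch of mkdirsWithExistsCheck and
     checkDir as documented above; the directory path is a placeholder.

     import java.io.File;
     import org.apache.hadoop.util.DiskChecker;

     File dir = new File("/data/hadoop/tmp");
     if (!DiskChecker.mkdirsWithExistsCheck(dir)) {
       // creation failed and the directory still does not exist
     }
     DiskChecker.checkDir(dir);   // throws DiskChecker.DiskErrorException on disk problems
-->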
|
|
<!-- start class org.apache.hadoop.util.DiskChecker.DiskErrorException -->
|
|
<class name="DiskChecker.DiskErrorException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DiskChecker.DiskErrorException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.DiskChecker.DiskErrorException -->
|
|
<!-- start class org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException -->
|
|
<class name="DiskChecker.DiskOutOfSpaceException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="DiskChecker.DiskOutOfSpaceException" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException -->
|
|
<!-- start class org.apache.hadoop.util.GenericOptionsParser -->
|
|
<class name="GenericOptionsParser" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="GenericOptionsParser" type="org.apache.hadoop.conf.Configuration, java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a <code>GenericOptionsParser</code> to parse only the generic Hadoop
|
|
arguments.
|
|
|
|
The array of string arguments other than the generic arguments can be
|
|
obtained by {@link #getRemainingArgs()}.
|
|
|
|
@param conf the <code>Configuration</code> to modify.
|
|
@param args command-line arguments.]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="GenericOptionsParser" type="org.apache.hadoop.conf.Configuration, org.apache.commons.cli.Options, java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a <code>GenericOptionsParser</code> to parse given options as well
|
|
as generic Hadoop options.
|
|
|
|
The resulting <code>CommandLine</code> object can be obtained by
|
|
{@link #getCommandLine()}.
|
|
|
|
@param conf the configuration to modify
|
|
@param options options built by the caller
|
|
@param args User-specified arguments]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getRemainingArgs" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns an array of Strings containing only application-specific arguments.
|
|
|
|
@return array of <code>String</code>s containing the un-parsed arguments
|
|
or <strong>empty array</strong> if commandLine was not defined.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCommandLine" return="org.apache.commons.cli.CommandLine"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the commons-cli <code>CommandLine</code> object
|
|
to process the parsed arguments.
|
|
|
|
Note: If the object is created with
|
|
{@link #GenericOptionsParser(Configuration, String[])}, then returned
|
|
object will only contain parsed generic options.
|
|
|
|
@return <code>CommandLine</code> representing list of arguments
|
|
parsed against Options descriptor.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLibJars" return="java.net.URL[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[If libjars are set in the conf, parse the libjars.
|
|
@param conf
|
|
@return libjar urls
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="printGenericCommandUsage"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="out" type="java.io.PrintStream"/>
|
|
<doc>
|
|
<![CDATA[Print the usage message for generic command-line options supported.
|
|
|
|
@param out stream to print the usage message to.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[<code>GenericOptionsParser</code> is a utility to parse command line
|
|
arguments generic to the Hadoop framework.
|
|
|
|
<code>GenericOptionsParser</code> recognizes several standard command
|
|
line arguments, enabling applications to easily specify a namenode, a
|
|
jobtracker, additional configuration resources etc.
|
|
|
|
<h4 id="GenericOptions">Generic Options</h4>
|
|
|
|
<p>The supported generic options are:</p>
|
|
<p><blockquote><pre>
|
|
-conf <configuration file> specify a configuration file
|
|
-D <property=value> use value for given property
|
|
-fs <local|namenode:port> specify a namenode
|
|
-jt <local|jobtracker:port> specify a job tracker
|
|
-files <comma separated list of files> specify comma separated
|
|
files to be copied to the map reduce cluster
|
|
-libjars <comma separated list of jars> specify comma separated
|
|
jar files to include in the classpath.
|
|
-archives <comma separated list of archives> specify comma
|
|
separated archives to be unarchived on the compute machines.
|
|
|
|
</pre></blockquote></p>
|
|
|
|
<p>The general command line syntax is:</p>
|
|
<p><tt><pre>
|
|
bin/hadoop command [genericOptions] [commandOptions]
|
|
</pre></tt></p>
|
|
|
|
<p>Generic command line arguments <strong>might</strong> modify
|
|
<code>Configuration </code> objects, given to constructors.</p>
|
|
|
|
<p>The functionality is implemented using Commons CLI.</p>
|
|
|
|
<p>Examples:</p>
|
|
<p><blockquote><pre>
|
|
$ bin/hadoop dfs -fs darwin:8020 -ls /data
|
|
list /data directory in dfs with namenode darwin:8020
|
|
|
|
$ bin/hadoop dfs -D fs.default.name=darwin:8020 -ls /data
|
|
list /data directory in dfs with namenode darwin:8020
|
|
|
|
$ bin/hadoop dfs -conf hadoop-site.xml -ls /data
|
|
list /data directory in dfs with conf specified in hadoop-site.xml
|
|
|
|
$ bin/hadoop job -D mapred.job.tracker=darwin:50020 -submit job.xml
|
|
submit a job to job tracker darwin:50020
|
|
|
|
$ bin/hadoop job -jt darwin:50020 -submit job.xml
|
|
submit a job to job tracker darwin:50020
|
|
|
|
$ bin/hadoop job -jt local -submit job.xml
|
|
submit a job to local runner
|
|
|
|
$ bin/hadoop jar -libjars testlib.jar
|
|
-archives test.tgz -files file.txt inputjar args
|
|
job submission with libjars, files and archives
|
|
</pre></blockquote></p>
|
|
|
|
@see Tool
|
|
@see ToolRunner]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.GenericOptionsParser -->
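<!-- Illustrative example (not part of the JDiff data): a sketch of separating generic Hadoop
     options from application arguments as described above; the surrounding main method is
     only a scaffold.

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.util.GenericOptionsParser;

     public static void main(String[] args) throws Exception {
       Configuration conf = new Configuration();
       // Applies -conf, -D, -fs, -jt, -files, -libjars and -archives to conf.
       GenericOptionsParser parser = new GenericOptionsParser(conf, args);
       String[] appArgs = parser.getRemainingArgs();   // application-specific arguments only
     }
-->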
|
|
<!-- start class org.apache.hadoop.util.GenericsUtil -->
|
|
<class name="GenericsUtil" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="GenericsUtil"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getClass" return="java.lang.Class<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="t" type="T"/>
|
|
<doc>
|
|
<![CDATA[Returns the Class object (of type <code>Class<T></code>) of the
|
|
argument of type <code>T</code>.
|
|
@param <T> The type of the argument
|
|
@param t the object whose class is to be returned
|
|
@return <code>Class<T></code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toArray" return="T[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="c" type="java.lang.Class<T>"/>
|
|
<param name="list" type="java.util.List<T>"/>
|
|
<doc>
|
|
<![CDATA[Converts the given <code>List<T></code> to an array of
|
|
<code>T[]</code>.
|
|
@param c the Class object of the items in the list
|
|
@param list the list to convert]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toArray" return="T[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="list" type="java.util.List<T>"/>
|
|
<doc>
|
|
<![CDATA[Converts the given <code>List<T></code> to an array of
|
|
<code>T[]</code>.
|
|
@param list the list to convert
|
|
@throws ArrayIndexOutOfBoundsException if the list is empty.
|
|
Use {@link #toArray(Class, List)} if the list may be empty.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Contains utility methods for dealing with Java Generics.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.GenericsUtil -->
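<!-- Illustrative example (not part of the JDiff data): a sketch of GenericsUtil.toArray as
     documented above; the list contents are made up.

     import java.util.Arrays;
     import java.util.List;
     import org.apache.hadoop.util.GenericsUtil;

     List<String> names = Arrays.asList("a", "b", "c");
     // The Class-taking overload is safe even for empty lists.
     String[] asArray = GenericsUtil.toArray(String.class, names);
-->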
|
|
<!-- start class org.apache.hadoop.util.HeapSort -->
|
|
<class name="HeapSort" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.IndexedSorter"/>
|
|
<constructor name="HeapSort"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="org.apache.hadoop.util.IndexedSortable"/>
|
|
<param name="p" type="int"/>
|
|
<param name="r" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sort the given range of items using heap sort.
|
|
{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="org.apache.hadoop.util.IndexedSortable"/>
|
|
<param name="p" type="int"/>
|
|
<param name="r" type="int"/>
|
|
<param name="rep" type="org.apache.hadoop.util.Progressable"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An implementation of the core algorithm of HeapSort.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.HeapSort -->
|
|
<!-- start class org.apache.hadoop.util.HostsFileReader -->
|
|
<class name="HostsFileReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="HostsFileReader" type="java.lang.String, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</constructor>
|
|
<method name="refresh"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getHosts" return="java.util.Set<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="getExcludedHosts" return="java.util.Set<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="setIncludesFile"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="includesFile" type="java.lang.String"/>
|
|
</method>
|
|
<method name="setExcludesFile"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="excludesFile" type="java.lang.String"/>
|
|
</method>
|
|
<method name="updateFileNames"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="includesFile" type="java.lang.String"/>
|
|
<param name="excludesFile" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.HostsFileReader -->
|
|
<!-- start interface org.apache.hadoop.util.IndexedSortable -->
|
|
<interface name="IndexedSortable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="compare" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<param name="j" type="int"/>
|
|
<doc>
|
|
<![CDATA[Compare items at the given addresses consistent with the semantics of
|
|
{@link java.util.Comparator#compare}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="swap"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="i" type="int"/>
|
|
<param name="j" type="int"/>
|
|
<doc>
|
|
<![CDATA[Swap items at the given addresses.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Interface for collections capable of being sorted by {@link IndexedSorter}
|
|
algorithms.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.util.IndexedSortable -->
|
|
<!-- start interface org.apache.hadoop.util.IndexedSorter -->
|
|
<interface name="IndexedSorter" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="org.apache.hadoop.util.IndexedSortable"/>
|
|
<param name="l" type="int"/>
|
|
<param name="r" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sort the items accessed through the given IndexedSortable over the given
|
|
range of logical indices. From the perspective of the sort algorithm,
|
|
each index between l (inclusive) and r (exclusive) is an addressable
|
|
entry.
|
|
@see IndexedSortable#compare
|
|
@see IndexedSortable#swap]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="org.apache.hadoop.util.IndexedSortable"/>
|
|
<param name="l" type="int"/>
|
|
<param name="r" type="int"/>
|
|
<param name="rep" type="org.apache.hadoop.util.Progressable"/>
|
|
<doc>
|
|
<![CDATA[Same as {@link #sort(IndexedSortable,int,int)}, but indicate progress
|
|
periodically.
|
|
@see #sort(IndexedSortable,int,int)]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Interface for sort algorithms accepting {@link IndexedSortable} items.
|
|
|
|
A sort algorithm implementing this interface may only
|
|
{@link IndexedSortable#compare} and {@link IndexedSortable#swap} items
|
|
for a range of indices to effect a sort across that range.]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.util.IndexedSorter -->
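<!-- Illustrative example (not part of the JDiff data): a sketch of an IndexedSortable over a
     plain int array sorted with HeapSort, following the compare/swap contract described above.

     import org.apache.hadoop.util.HeapSort;
     import org.apache.hadoop.util.IndexedSortable;

     final int[] data = { 4, 1, 3, 2 };
     IndexedSortable sortable = new IndexedSortable() {
       public int compare(int i, int j) {
         return data[i] < data[j] ? -1 : (data[i] == data[j] ? 0 : 1);
       }
       public void swap(int i, int j) {
         int t = data[i]; data[i] = data[j]; data[j] = t;
       }
     };
     new HeapSort().sort(sortable, 0, data.length);   // sorts indices [0, data.length)
-->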
|
|
<!-- start class org.apache.hadoop.util.LineReader -->
|
|
<class name="LineReader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="LineReader" type="java.io.InputStream"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a line reader that reads from the given stream using the
|
|
default buffer-size (64k).
|
|
@param in The input stream
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="LineReader" type="java.io.InputStream, int"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Create a line reader that reads from the given stream using the
|
|
given buffer-size.
|
|
@param in The input stream
|
|
@param bufferSize Size of the read buffer
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<constructor name="LineReader" type="java.io.InputStream, org.apache.hadoop.conf.Configuration"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Create a line reader that reads from the given stream using the
|
|
<code>io.file.buffer.size</code> specified in the given
|
|
<code>Configuration</code>.
|
|
@param in input stream
|
|
@param conf configuration
|
|
@throws IOException]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="close"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Close the underlying stream.
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readLine" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="org.apache.hadoop.io.Text"/>
|
|
<param name="maxLineLength" type="int"/>
|
|
<param name="maxBytesToConsume" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read from the InputStream into the given Text.
|
|
@param str the object to store the given line
|
|
@param maxLineLength the maximum number of bytes to store into str.
|
|
@param maxBytesToConsume the maximum number of bytes to consume in this call.
|
|
@return the number of bytes read including the newline
|
|
@throws IOException if the underlying stream throws]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readLine" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="org.apache.hadoop.io.Text"/>
|
|
<param name="maxLineLength" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read from the InputStream into the given Text.
|
|
@param str the object to store the given line
|
|
@param maxLineLength the maximum number of bytes to store into str.
|
|
@return the number of bytes read including the newline
|
|
@throws IOException if the underlying stream throws]]>
|
|
</doc>
|
|
</method>
|
|
<method name="readLine" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="org.apache.hadoop.io.Text"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Read from the InputStream into the given Text.
|
|
@param str the object to store the given line
|
|
@return the number of bytes read including the newline
|
|
@throws IOException if the underlying stream throws]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A class that provides a line reader from an input stream.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.LineReader -->
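<!-- Illustrative example (not part of the JDiff data): a sketch of reading lines into a Text
     with the readLine overloads listed above; the input file is a placeholder.

     import java.io.FileInputStream;
     import org.apache.hadoop.io.Text;
     import org.apache.hadoop.util.LineReader;

     LineReader reader = new LineReader(new FileInputStream("/tmp/input.txt"));
     Text line = new Text();
     // readLine returns the number of bytes consumed including the newline; 0 means end of stream.
     while (reader.readLine(line) > 0) {
       System.out.println(line);
     }
     reader.close();
-->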
|
|
<!-- start class org.apache.hadoop.util.MergeSort -->
|
|
<class name="MergeSort" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="MergeSort" type="java.util.Comparator<org.apache.hadoop.io.IntWritable>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="mergeSort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="src" type="int[]"/>
|
|
<param name="dest" type="int[]"/>
|
|
<param name="low" type="int"/>
|
|
<param name="high" type="int"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An implementation of the core algorithm of MergeSort.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.MergeSort -->
|
|
<!-- start class org.apache.hadoop.util.NativeCodeLoader -->
|
|
<class name="NativeCodeLoader" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="NativeCodeLoader"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="isNativeCodeLoaded" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Check if native-hadoop code is loaded for this platform.
|
|
|
|
@return <code>true</code> if native-hadoop is loaded,
|
|
else <code>false</code>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getLoadNativeLibraries" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Return if native hadoop libraries, if present, can be used for this job.
|
|
@param conf configuration
|
|
|
|
@return <code>true</code> if native hadoop libraries, if present, can be
|
|
used for this job; <code>false</code> otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setLoadNativeLibraries"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<param name="loadNativeLibraries" type="boolean"/>
|
|
<doc>
|
|
<![CDATA[Set if native hadoop libraries, if present, can be used for this job.
|
|
|
|
@param conf configuration
|
|
@param loadNativeLibraries can native hadoop libraries be loaded]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A helper to load the native hadoop code i.e. libhadoop.so.
|
|
This handles the fallback to either the bundled libhadoop-Linux-i386-32.so
|
|
or the default java implementations where appropriate.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.NativeCodeLoader -->
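<!-- Illustrative example (not part of the JDiff data): a sketch of checking and configuring
     native-library use with the methods listed above.

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.util.NativeCodeLoader;

     Configuration conf = new Configuration();
     NativeCodeLoader loader = new NativeCodeLoader();
     loader.setLoadNativeLibraries(conf, true);
     boolean usable = NativeCodeLoader.isNativeCodeLoaded()
         && loader.getLoadNativeLibraries(conf);
-->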
|
|
<!-- start class org.apache.hadoop.util.PlatformName -->
|
|
<class name="PlatformName" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="PlatformName"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getPlatformName" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the complete platform as per the java-vm.
|
|
@return the complete platform as per the java-vm.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A helper class for getting build-info of the java-vm.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.PlatformName -->
|
|
<!-- start class org.apache.hadoop.util.PrintJarMainClass -->
|
|
<class name="PrintJarMainClass" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="PrintJarMainClass"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[@param args]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A micro-application that prints the main class name out of a jar file.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.PrintJarMainClass -->
|
|
<!-- start class org.apache.hadoop.util.PriorityQueue -->
|
|
<class name="PriorityQueue" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="PriorityQueue"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="lessThan" return="boolean"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="a" type="java.lang.Object"/>
|
|
<param name="b" type="java.lang.Object"/>
|
|
<doc>
|
|
<![CDATA[Determines the ordering of objects in this priority queue. Subclasses
|
|
must define this one method.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="initialize"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="maxSize" type="int"/>
|
|
<doc>
|
|
<![CDATA[Subclass constructors must call this.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="put"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="element" type="T"/>
|
|
<doc>
|
|
<![CDATA[Adds an Object to a PriorityQueue in log(size) time.
|
|
If one tries to add more objects than maxSize from initialize
|
|
a RuntimeException (ArrayIndexOutOfBoundsException) is thrown.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="insert" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="element" type="T"/>
|
|
<doc>
|
|
<![CDATA[Adds element to the PriorityQueue in log(size) time if either
|
|
the PriorityQueue is not full, or not lessThan(element, top()).
|
|
@param element
|
|
@return true if element is added, false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="top" return="T"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the least element of the PriorityQueue in constant time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="pop" return="T"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Removes and returns the least element of the PriorityQueue in log(size)
|
|
time.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="adjustTop"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Should be called when the Object at top changes values. Still log(n)
|
|
worst case, but it's at least twice as fast to <pre>
|
|
{ pq.top().change(); pq.adjustTop(); }
|
|
</pre> instead of <pre>
|
|
{ o = pq.pop(); o.change(); pq.push(o); }
|
|
</pre>]]>
|
|
</doc>
|
|
</method>
|
|
<method name="size" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the number of elements currently stored in the PriorityQueue.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="clear"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Removes all entries from the PriorityQueue.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A PriorityQueue maintains a partial ordering of its elements such that the
|
|
least element can always be found in constant time. Put()'s and pop()'s
|
|
require log(size) time.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.PriorityQueue -->
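<!-- Usage sketch for PriorityQueue (illustrative only, not JDiff output). A subclass supplies
     lessThan() and calls initialize() from its constructor, per the method docs above; the
     LongQueue subclass and sample values are assumptions.

     import org.apache.hadoop.util.PriorityQueue;

     public class LongQueueExample {
       static class LongQueue extends PriorityQueue<Long> {
         LongQueue(int maxSize) { initialize(maxSize); }
         protected boolean lessThan(Object a, Object b) {
           return ((Long) a).longValue() < ((Long) b).longValue();
         }
       }

       public static void main(String[] args) {
         LongQueue q = new LongQueue(3);
         q.put(42L);
         q.put(7L);
         q.put(19L);
         System.out.println(q.top());   // least element in constant time: 7
         System.out.println(q.pop());   // removes and returns the least element: 7
         System.out.println(q.size());  // 2
       }
     }
-->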
|
|
<!-- start class org.apache.hadoop.util.ProcfsBasedProcessTree -->
|
|
<class name="ProcfsBasedProcessTree" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ProcfsBasedProcessTree" type="java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setSigKillInterval"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="interval" type="long"/>
|
|
</method>
|
|
<method name="isAvailable" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Checks if the ProcfsBasedProcessTree is available on this system.
|
|
|
|
@return true if ProcfsBasedProcessTree is available. False otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProcessTree" return="org.apache.hadoop.util.ProcfsBasedProcessTree"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the process-tree with latest state.
|
|
|
|
@return the process-tree with latest state.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="isAlive" return="boolean"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Is the process-tree alive? Currently we care only about the status of the
|
|
root-process.
|
|
|
|
@return true if the process-tree is alive, false otherwise.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="destroy"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Destroy the process-tree. Currently we only make sure the root process is
|
|
gone. It is the responsibility of the root process to make sure that all
|
|
its descendants are cleaned up.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getCumulativeVmem" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the cumulative virtual memory used by all the processes in the
|
|
process-tree.
|
|
|
|
@return cumulative virtual memory used by the process-tree in kilobytes.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getPidFromPidFile" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="pidFileName" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get PID from a pid-file.
|
|
|
|
@param pidFileName
|
|
Name of the pid-file.
|
|
@return the PID string read from the pid-file. Returns null if the
|
|
pidFileName points to a non-existing file or if read fails from the
|
|
file.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns a string printing PIDs of processes present in the
|
|
ProcfsBasedProcessTree. Output format : [pid pid ..]]]>
|
|
</doc>
|
|
</method>
|
|
<field name="DEFAULT_SLEEPTIME_BEFORE_SIGKILL" type="long"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A Proc file-system based ProcessTree. Works only on Linux.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.ProcfsBasedProcessTree -->
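<!-- Usage sketch for ProcfsBasedProcessTree (illustrative only, not JDiff output). The PID
     value is a placeholder; in practice it would typically come from a pid-file, e.g. via
     getPidFromPidFile(). Works only on Linux, so availability is checked first.

     import org.apache.hadoop.util.ProcfsBasedProcessTree;

     public class ProcessTreeExample {
       public static void main(String[] args) {
         if (!ProcfsBasedProcessTree.isAvailable()) {
           System.out.println("procfs-based process trees are not supported here (Linux only)");
           return;
         }
         ProcfsBasedProcessTree tree = new ProcfsBasedProcessTree("12345"); // placeholder root PID
         tree = tree.getProcessTree();                  // refresh to the latest /proc state
         System.out.println("alive: " + tree.isAlive());
         System.out.println("cumulative vmem: " + tree.getCumulativeVmem());
         System.out.println("pids: " + tree);           // toString() prints [pid pid ..]
       }
     }
-->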
|
|
<!-- start class org.apache.hadoop.util.ProgramDriver -->
|
|
<class name="ProgramDriver" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ProgramDriver"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="addClass"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
<param name="mainClass" type="java.lang.Class"/>
|
|
<param name="description" type="java.lang.String"/>
|
|
<exception name="Throwable" type="java.lang.Throwable"/>
|
|
<doc>
|
|
<![CDATA[This is the method that adds the classes to the repository
|
|
@param name The name under which the class should be registered and invoked
|
|
@param mainClass The class that you want to add to the repository
|
|
@param description The description of the class
|
|
@throws NoSuchMethodException
|
|
@throws SecurityException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="driver"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Throwable" type="java.lang.Throwable"/>
|
|
<doc>
|
|
<![CDATA[This is a driver for the example programs.
|
|
It looks at the first command line argument and tries to find an
|
|
example program with that name.
|
|
If it is found, it calls the main method in that class with the rest
|
|
of the command line arguments.
|
|
@param args The argument from the user. args[0] is the command to run.
|
|
@throws NoSuchMethodException
|
|
@throws SecurityException
|
|
@throws IllegalAccessException
|
|
@throws IllegalArgumentException
|
|
@throws Throwable Anything thrown by the example program's main]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A driver that is used to run programs added to it]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.ProgramDriver -->
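<!-- Usage sketch for ProgramDriver (illustrative only, not JDiff output). The registered
     programs are PlatformName and PrintJarMainClass from this same package, since both expose
     a static main(); the command names and descriptions are assumptions.

     import org.apache.hadoop.util.PlatformName;
     import org.apache.hadoop.util.PrintJarMainClass;
     import org.apache.hadoop.util.ProgramDriver;

     public class ExampleDriver {
       public static void main(String[] args) throws Throwable {
         ProgramDriver pgd = new ProgramDriver();
         pgd.addClass("platformname", PlatformName.class, "prints the platform name of the JVM");
         pgd.addClass("printmainclass", PrintJarMainClass.class, "prints the main class of a jar");
         // args[0] selects the program; the remaining arguments go to that program's main()
         pgd.driver(args);
       }
     }
-->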
|
|
<!-- start class org.apache.hadoop.util.Progress -->
|
|
<class name="Progress" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Progress"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Creates a new root node.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="addPhase" return="org.apache.hadoop.util.Progress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="status" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Adds a named node to the tree.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="addPhase" return="org.apache.hadoop.util.Progress"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Adds a node to the tree.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startNextPhase"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Called during execution to move to the next phase at this level in the
|
|
tree.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="phase" return="org.apache.hadoop.util.Progress"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the current sub-node executing.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="complete"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Completes this node, moving the parent node to its next child.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="set"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="progress" type="float"/>
|
|
<doc>
|
|
<![CDATA[Called during execution on a leaf node to set its progress.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="get" return="float"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the overall progress of the root.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setStatus"
|
|
abstract="false" native="false" synchronized="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="status" type="java.lang.String"/>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Utility to assist with generation of progress reports. Applications build
|
|
a hierarchy of {@link Progress} instances, each modelling a phase of
|
|
execution. The root is constructed with {@link #Progress()}. Nodes for
|
|
sub-phases are created by calling {@link #addPhase()}.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.Progress -->
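<!-- Usage sketch for Progress (illustrative only, not JDiff output). Phase names and values
     are assumptions; the sketch only exercises the tree-building and reporting calls listed
     above.

     import org.apache.hadoop.util.Progress;

     public class ProgressExample {
       public static void main(String[] args) {
         Progress root = new Progress();            // root of the phase tree
         Progress copy = root.addPhase("copy");     // named sub-phases
         Progress sort = root.addPhase("sort");

         copy.set(0.5f);                            // leaf progress is set directly
         System.out.println(root.get());            // overall progress of the root so far

         root.startNextPhase();                     // move on to the "sort" phase
         sort.set(1.0f);
         System.out.println(root.get());
       }
     }
-->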
|
|
<!-- start interface org.apache.hadoop.util.Progressable -->
|
|
<interface name="Progressable" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="progress"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Report progress to the Hadoop framework.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A facility for reporting progress.
|
|
|
|
<p>Clients and/or applications can use the provided <code>Progressable</code>
|
|
to explicitly report progress to the Hadoop framework. This is especially
|
|
important for operations which take a significant amount of time since,
|
|
in lieu of the reported progress, the framework has to assume that an error
|
|
has occurred and time out the operation.</p>]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.util.Progressable -->
|
|
<!-- start class org.apache.hadoop.util.QuickSort -->
|
|
<class name="QuickSort" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.util.IndexedSorter"/>
|
|
<constructor name="QuickSort"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getMaxDepth" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="x" type="int"/>
|
|
<doc>
|
|
<![CDATA[Deepest recursion before giving up and doing a heapsort.
|
|
Returns 2 * ceil(log(n)).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="org.apache.hadoop.util.IndexedSortable"/>
|
|
<param name="p" type="int"/>
|
|
<param name="r" type="int"/>
|
|
<doc>
|
|
<![CDATA[Sort the given range of items using quick sort.
|
|
{@inheritDoc} If the recursion depth falls below {@link #getMaxDepth},
|
|
then switch to {@link HeapSort}.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="sort"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="org.apache.hadoop.util.IndexedSortable"/>
|
|
<param name="p" type="int"/>
|
|
<param name="r" type="int"/>
|
|
<param name="rep" type="org.apache.hadoop.util.Progressable"/>
|
|
<doc>
|
|
<![CDATA[{@inheritDoc}]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[An implementation of the core algorithm of QuickSort.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.QuickSort -->
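<!-- Usage sketch for QuickSort (illustrative only, not JDiff output). It assumes the
     org.apache.hadoop.util.IndexedSortable callback interface with compare(int,int) and
     swap(int,int) over indices; the sample array is an assumption.

     import org.apache.hadoop.util.IndexedSortable;
     import org.apache.hadoop.util.QuickSort;

     public class QuickSortExample {
       public static void main(String[] args) {
         final int[] a = {4, 1, 3, 2};
         IndexedSortable sortable = new IndexedSortable() {
           public int compare(int i, int j) { return a[i] - a[j]; }
           public void swap(int i, int j) { int t = a[i]; a[i] = a[j]; a[j] = t; }
         };
         new QuickSort().sort(sortable, 0, a.length);   // sorts the index range [0, a.length)
         System.out.println(java.util.Arrays.toString(a));
       }
     }
-->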
|
|
<!-- start class org.apache.hadoop.util.ReflectionUtils -->
|
|
<class name="ReflectionUtils" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ReflectionUtils"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="setConf"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theObject" type="java.lang.Object"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Check and set 'configuration' if necessary.
|
|
|
|
@param theObject object for which to set configuration
|
|
@param conf Configuration]]>
|
|
</doc>
|
|
</method>
|
|
<method name="newInstance" return="T"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="theClass" type="java.lang.Class<T>"/>
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Create an object for the given class and initialize it from conf
|
|
|
|
@param theClass class of which an object is created
|
|
@param conf Configuration
|
|
@return a new object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setContentionTracing"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="val" type="boolean"/>
|
|
</method>
|
|
<method name="printThreadInfo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="stream" type="java.io.PrintWriter"/>
|
|
<param name="title" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Print all of the threads' information and stack traces.
|
|
|
|
@param stream the stream to write to
|
|
@param title a string title for the stack trace]]>
|
|
</doc>
|
|
</method>
|
|
<method name="logThreadInfo"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="log" type="org.apache.commons.logging.Log"/>
|
|
<param name="title" type="java.lang.String"/>
|
|
<param name="minInterval" type="long"/>
|
|
<doc>
|
|
<![CDATA[Log the current thread stacks at INFO level.
|
|
@param log the logger that logs the stack trace
|
|
@param title a descriptive title for the call stacks
|
|
@param minInterval the minimum time since the stacks were last logged]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getClass" return="java.lang.Class<T>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="o" type="T"/>
|
|
<doc>
|
|
<![CDATA[Return the correctly-typed {@link Class} of the given object.
|
|
|
|
@param o object whose correctly-typed <code>Class</code> is to be obtained
|
|
@return the correctly typed <code>Class</code> of the given object.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[General reflection utils]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.ReflectionUtils -->
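<!-- Usage sketch for ReflectionUtils (illustrative only, not JDiff output). LongWritable is
     only an illustration of a class with a no-argument constructor; in practice the class is
     usually read from the configuration.

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.io.LongWritable;
     import org.apache.hadoop.util.ReflectionUtils;

     public class ReflectionUtilsExample {
       public static void main(String[] args) {
         Configuration conf = new Configuration();
         LongWritable value = ReflectionUtils.newInstance(LongWritable.class, conf);
         System.out.println(ReflectionUtils.getClass(value));  // class org.apache.hadoop.io.LongWritable
       }
     }
-->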
|
|
<!-- start class org.apache.hadoop.util.RunJar -->
|
|
<class name="RunJar" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="RunJar"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="unJar"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="jarFile" type="java.io.File"/>
|
|
<param name="toDir" type="java.io.File"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Unpack a jar file into a directory.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="main"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Throwable" type="java.lang.Throwable"/>
|
|
<doc>
|
|
<![CDATA[Run a Hadoop job jar. If the main class is not in the jar's manifest,
|
|
then it must be provided on the command line.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[Run a Hadoop job jar.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.RunJar -->
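<!-- Usage sketch for RunJar (illustrative only, not JDiff output). The paths are placeholders;
     RunJar.main() is what typically backs the "hadoop jar" command, while unJar() can also be
     called directly as below.

     import java.io.File;
     import org.apache.hadoop.util.RunJar;

     public class UnJarExample {
       public static void main(String[] args) throws Exception {
         // Expand the jar's contents into the target directory (paths are illustrative)
         RunJar.unJar(new File("/tmp/job.jar"), new File("/tmp/job-unpacked"));
       }
     }
-->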
|
|
<!-- start class org.apache.hadoop.util.ServletUtil -->
|
|
<class name="ServletUtil" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="ServletUtil"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="initHTML" return="java.io.PrintWriter"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="response" type="javax.servlet.ServletResponse"/>
|
|
<param name="title" type="java.lang.String"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Initial HTML header]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getParameter" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="request" type="javax.servlet.ServletRequest"/>
|
|
<param name="name" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Get a parameter from a ServletRequest.
|
|
Return null if the parameter contains only white spaces.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="htmlFooter" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[HTML footer to be added in the jsps.
|
|
@return the HTML footer.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="percentageGraph" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="perc" type="int"/>
|
|
<param name="width" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Generate the percentage graph and return an HTML representation string
|
|
of the same.
|
|
|
|
@param perc The percentage value for which graph is to be generated
|
|
@param width The width of the display table
|
|
@return HTML String representation of the percentage graph
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<method name="percentageGraph" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="perc" type="float"/>
|
|
<param name="width" type="int"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Generate the percentage graph and return an HTML representation string
|
|
of the same.
|
|
@param perc The percentage value for which graph is to be generated
|
|
@param width The width of the display table
|
|
@return HTML String representation of the percentage graph
|
|
@throws IOException]]>
|
|
</doc>
|
|
</method>
|
|
<field name="HTML_TAIL" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.ServletUtil -->
|
|
<!-- start class org.apache.hadoop.util.Shell -->
|
|
<class name="Shell" extends="java.lang.Object"
|
|
abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Shell"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Shell" type="long"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[@param interval the minimum duration to wait before re-executing the
|
|
command.]]>
|
|
</doc>
|
|
</constructor>
|
|
<method name="getGROUPS_COMMAND" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[a Unix command to get the current user's groups list]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getGET_PERMISSION_COMMAND" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return a Unix command to get permission information.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getUlimitMemoryCommand" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
|
|
<doc>
|
|
<![CDATA[Get the Unix command for setting the maximum virtual memory available
|
|
to a given child process. This is only relevant when we are forking a
|
|
process from within the {@link org.apache.hadoop.mapred.Mapper} or the
|
|
{@link org.apache.hadoop.mapred.Reducer} implementations
|
|
e.g. <a href="{@docRoot}/org/apache/hadoop/mapred/pipes/package-summary.html">Hadoop Pipes</a>
|
|
or <a href="{@docRoot}/org/apache/hadoop/streaming/package-summary.html">Hadoop Streaming</a>.
|
|
|
|
It also checks to ensure that we are running on a *nix platform else
|
|
(e.g. in Cygwin/Windows) it returns <code>null</code>.
|
|
@param conf configuration
|
|
@return a <code>String[]</code> with the ulimit command arguments or
|
|
<code>null</code> if we are running on a non *nix platform or
|
|
if the limit is unspecified.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setEnvironment"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="env" type="java.util.Map<java.lang.String, java.lang.String>"/>
|
|
<doc>
|
|
<![CDATA[set the environment for the command
|
|
@param env Mapping of environment variables]]>
|
|
</doc>
|
|
</method>
|
|
<method name="setWorkingDirectory"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="dir" type="java.io.File"/>
|
|
<doc>
|
|
<![CDATA[set the working directory
|
|
@param dir The directory where the command would be executed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="run"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[check to see if a command needs to be executed and execute if needed]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getExecString" return="java.lang.String[]"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[return an array containing the command name & its parameters]]>
|
|
</doc>
|
|
</method>
|
|
<method name="parseExecResult"
|
|
abstract="true" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="lines" type="java.io.BufferedReader"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Parse the execution result]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getProcess" return="java.lang.Process"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[get the current sub-process executing the given command
|
|
@return process executing the command]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getExitCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[get the exit code
|
|
@return the exit code of the process]]>
|
|
</doc>
|
|
</method>
|
|
<method name="execCommand" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="cmd" type="java.lang.String[]"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Static method to execute a shell command.
|
|
Covers most of the simple cases without requiring the user to implement
|
|
the <code>Shell</code> interface.
|
|
@param cmd shell command to execute.
|
|
@return the output of the executed command.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="LOG" type="org.apache.commons.logging.Log"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="USER_NAME_COMMAND" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[a Unix command to get the current user's name]]>
|
|
</doc>
|
|
</field>
|
|
<field name="SET_PERMISSION_COMMAND" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[a Unix command to set permission]]>
|
|
</doc>
|
|
</field>
|
|
<field name="SET_OWNER_COMMAND" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[a Unix command to set owner]]>
|
|
</doc>
|
|
</field>
|
|
<field name="SET_GROUP_COMMAND" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="WINDOWS" type="boolean"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Set to true on Windows platforms]]>
|
|
</doc>
|
|
</field>
|
|
<doc>
|
|
<![CDATA[A base class for running a Unix command.
|
|
|
|
<code>Shell</code> can be used to run unix commands like <code>du</code> or
|
|
<code>df</code>. It also offers facilities to gate commands by
|
|
time-intervals.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.Shell -->
|
|
<!-- start class org.apache.hadoop.util.Shell.ExitCodeException -->
|
|
<class name="Shell.ExitCodeException" extends="java.io.IOException"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Shell.ExitCodeException" type="int, java.lang.String"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="getExitCode" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<doc>
|
|
<![CDATA[This is an IOException with exit code added.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.Shell.ExitCodeException -->
|
|
<!-- start class org.apache.hadoop.util.Shell.ShellCommandExecutor -->
|
|
<class name="Shell.ShellCommandExecutor" extends="org.apache.hadoop.util.Shell"
|
|
abstract="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="Shell.ShellCommandExecutor" type="java.lang.String[]"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Shell.ShellCommandExecutor" type="java.lang.String[], java.io.File"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<constructor name="Shell.ShellCommandExecutor" type="java.lang.String[], java.io.File, java.util.Map<java.lang.String, java.lang.String>"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="execute"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
<doc>
|
|
<![CDATA[Execute the shell command.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getExecString" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="parseExecResult"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="protected"
|
|
deprecated="not deprecated">
|
|
<param name="lines" type="java.io.BufferedReader"/>
|
|
<exception name="IOException" type="java.io.IOException"/>
|
|
</method>
|
|
<method name="getOutput" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Get the output of the shell command.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="toString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Returns the commands of this instance.
|
|
Arguments containing spaces are presented surrounded by quotes; other
|
|
arguments are presented raw
|
|
|
|
@return a string representation of the object.]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A simple shell command executor.
|
|
|
|
<code>ShellCommandExecutor</code> should be used in cases where the output
|
|
of the command needs no explicit parsing and where the command, working
|
|
directory and the environment remain unchanged. The output of the command
|
|
is stored as-is and is expected to be small.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.Shell.ShellCommandExecutor -->
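<!-- Usage sketch for Shell.ShellCommandExecutor and Shell.execCommand (illustrative only, not
     JDiff output). The chosen Unix commands are assumptions; any command with small output
     would do.

     import org.apache.hadoop.util.Shell;

     public class ShellExample {
       public static void main(String[] args) throws Exception {
         Shell.ShellCommandExecutor exec =
             new Shell.ShellCommandExecutor(new String[] { "uname", "-a" });
         exec.execute();                                   // throws IOException on failure
         System.out.println("exit code: " + exec.getExitCode());
         System.out.println(exec.getOutput());

         // One-shot helper for simple cases
         String date = Shell.execCommand(new String[] { "date" });
         System.out.println(date);
       }
     }
-->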
|
|
<!-- start class org.apache.hadoop.util.StringUtils -->
|
|
<class name="StringUtils" extends="java.lang.Object"
|
|
abstract="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<constructor name="StringUtils"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</constructor>
|
|
<method name="stringifyException" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="e" type="java.lang.Throwable"/>
|
|
<doc>
|
|
<![CDATA[Make a string representation of the exception.
|
|
@param e The exception to stringify
|
|
@return A string with exception name and call stack.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="simpleHostname" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="fullHostname" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Given a full hostname, return the word up to the first dot.
|
|
@param fullHostname the full hostname
|
|
@return the hostname to the first dot]]>
|
|
</doc>
|
|
</method>
|
|
<method name="humanReadableInt" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="number" type="long"/>
|
|
<doc>
|
|
<![CDATA[Given an integer, return a string that is in an approximate, but human
|
|
readable format.
|
|
It uses the bases 'k', 'm', and 'g' for 1024, 1024**2, and 1024**3.
|
|
@param number the number to format
|
|
@return a human readable form of the integer]]>
|
|
</doc>
|
|
</method>
|
|
<method name="formatPercent" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="done" type="double"/>
|
|
<param name="digits" type="int"/>
|
|
<doc>
|
|
<![CDATA[Format a percentage for presentation to the user.
|
|
@param done the percentage to format (0.0 to 1.0)
|
|
@param digits the number of digits past the decimal point
|
|
@return a string representation of the percentage]]>
|
|
</doc>
|
|
</method>
|
|
<method name="arrayToString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="strs" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[Given an array of strings, return a comma-separated list of its elements.
|
|
@param strs Array of strings
|
|
@return Empty string if strs.length is 0, comma separated list of strings
|
|
otherwise]]>
|
|
</doc>
|
|
</method>
|
|
<method name="byteToHexString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<param name="start" type="int"/>
|
|
<param name="end" type="int"/>
|
|
<doc>
|
|
<![CDATA[Given an array of bytes, convert a range of the bytes to a hex string
|
|
representation.
|
|
@param bytes
|
|
@param start start index, inclusively
|
|
@param end end index, exclusively
|
|
@return hex string representation of the byte array]]>
|
|
</doc>
|
|
</method>
|
|
<method name="byteToHexString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="bytes" type="byte[]"/>
|
|
<doc>
|
|
<![CDATA[Same as byteToHexString(bytes, 0, bytes.length).]]>
|
|
</doc>
|
|
</method>
|
|
<method name="hexStringToByte" return="byte[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="hex" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Given a hexstring this will return the byte array corresponding to the
|
|
string
|
|
@param hex the hex String
|
|
@return a byte array that is a hex string representation of the given
|
|
string. The size of the byte array is therefore hex.length/2]]>
|
|
</doc>
|
|
</method>
|
|
<method name="uriToString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="uris" type="java.net.URI[]"/>
|
|
<doc>
|
|
<![CDATA[@param uris]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stringToURI" return="java.net.URI[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[@param str]]>
|
|
</doc>
|
|
</method>
|
|
<method name="stringToPath" return="org.apache.hadoop.fs.Path[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String[]"/>
|
|
<doc>
|
|
<![CDATA[@param str]]>
|
|
</doc>
|
|
</method>
|
|
<method name="formatTimeDiff" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="finishTime" type="long"/>
|
|
<param name="startTime" type="long"/>
|
|
<doc>
|
|
<![CDATA[Given a finish and start time in long milliseconds, returns a
|
|
String in the format Xhrs, Ymins, Z sec, for the time difference between two times.
|
|
If the finish time comes before the start time then negative values of X, Y and Z will be returned.
|
|
|
|
@param finishTime finish time
|
|
@param startTime start time]]>
|
|
</doc>
|
|
</method>
|
|
<method name="formatTime" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="timeDiff" type="long"/>
|
|
<doc>
|
|
<![CDATA[Given the time in long milliseconds, returns a
|
|
String in the format Xhrs, Ymins, Z sec.
|
|
|
|
@param timeDiff The time difference to format]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getFormattedTimeWithDiff" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="dateFormat" type="java.text.DateFormat"/>
|
|
<param name="finishTime" type="long"/>
|
|
<param name="startTime" type="long"/>
|
|
<doc>
|
|
<![CDATA[Formats time in ms and appends difference (finishTime - startTime)
|
|
as returned by formatTimeDiff().
|
|
If the finish time is 0, an empty string is returned; if the start time is 0,
|
|
the difference is not appended to the return value.
|
|
@param dateFormat date format to use
|
|
@param finishTime finish time
|
|
@param startTime start time
|
|
@return formatted value.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStrings" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns an array of strings.
|
|
@param str the comma separated string values
|
|
@return the array of comma separated string values]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getStringCollection" return="java.util.Collection<java.lang.String>"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Returns a collection of strings.
|
|
@param str comma separated string values
|
|
@return an <code>ArrayList</code> of string values]]>
|
|
</doc>
|
|
</method>
|
|
<method name="split" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Split a string using the default separator
|
|
@param str a string that may have escaped separator
|
|
@return an array of strings]]>
|
|
</doc>
|
|
</method>
|
|
<method name="split" return="java.lang.String[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<param name="escapeChar" type="char"/>
|
|
<param name="separator" type="char"/>
|
|
<doc>
|
|
<![CDATA[Split a string using the given separator
|
|
@param str a string that may have escaped separator
|
|
@param escapeChar a char that is used to escape the separator
|
|
@param separator a separator char
|
|
@return an array of strings]]>
|
|
</doc>
|
|
</method>
|
|
<method name="findNext" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<param name="separator" type="char"/>
|
|
<param name="escapeChar" type="char"/>
|
|
<param name="start" type="int"/>
|
|
<param name="split" type="java.lang.StringBuilder"/>
|
|
<doc>
|
|
<![CDATA[Finds the first occurrence of the separator character ignoring the escaped
|
|
separators starting from the index. Note the substring between the index
|
|
and the position of the separator is passed.
|
|
@param str the source string
|
|
@param separator the character to find
|
|
@param escapeChar character used to escape
|
|
@param start from where to search
|
|
@param split used to pass back the extracted string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="escapeString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Escape commas in the string using the default escape char
|
|
@param str a string
|
|
@return an escaped string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="escapeString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<param name="escapeChar" type="char"/>
|
|
<param name="charToEscape" type="char"/>
|
|
<doc>
|
|
<![CDATA[Escape <code>charToEscape</code> in the string
|
|
with the escape char <code>escapeChar</code>
|
|
|
|
@param str string
|
|
@param escapeChar escape char
|
|
@param charToEscape the char to be escaped
|
|
@return an escaped string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="escapeString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<param name="escapeChar" type="char"/>
|
|
<param name="charsToEscape" type="char[]"/>
|
|
<doc>
|
|
<![CDATA[@param charsToEscape array of characters to be escaped]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unEscapeString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Unescape commas in the string using the default escape char
|
|
@param str a string
|
|
@return an unescaped string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unEscapeString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<param name="escapeChar" type="char"/>
|
|
<param name="charToEscape" type="char"/>
|
|
<doc>
|
|
<![CDATA[Unescape <code>charToEscape</code> in the string
|
|
with the escape char <code>escapeChar</code>
|
|
|
|
@param str string
|
|
@param escapeChar escape char
|
|
@param charToEscape the escaped char
|
|
@return an unescaped string]]>
|
|
</doc>
|
|
</method>
|
|
<method name="unEscapeString" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="str" type="java.lang.String"/>
|
|
<param name="escapeChar" type="char"/>
|
|
<param name="charsToEscape" type="char[]"/>
|
|
<doc>
|
|
<![CDATA[@param charsToEscape array of characters to unescape]]>
|
|
</doc>
|
|
</method>
|
|
<method name="getHostname" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<doc>
|
|
<![CDATA[Return hostname without throwing exception.
|
|
@return hostname]]>
|
|
</doc>
|
|
</method>
|
|
<method name="startupShutdownMessage"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="clazz" type="java.lang.Class<?>"/>
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<param name="LOG" type="org.apache.commons.logging.Log"/>
|
|
<doc>
|
|
<![CDATA[Print a log message for starting up and shutting down
|
|
@param clazz the class of the server
|
|
@param args arguments
|
|
@param LOG the target log object]]>
|
|
</doc>
|
|
</method>
|
|
<method name="escapeHTML" return="java.lang.String"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="string" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Escapes HTML Special characters present in the string.
|
|
@param string
|
|
@return HTML Escaped String representation]]>
|
|
</doc>
|
|
</method>
|
|
<field name="COMMA" type="char"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="COMMA_STR" type="java.lang.String"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="ESCAPE_CHAR" type="char"
|
|
transient="false" volatile="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[General string utils]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.StringUtils -->
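<!-- Usage sketch for StringUtils (illustrative only, not JDiff output). Sample inputs are
     assumptions; the expected outputs in comments follow the method docs above rather than
     exact formatting guarantees.

     import org.apache.hadoop.util.StringUtils;

     public class StringUtilsExample {
       public static void main(String[] args) {
         System.out.println(StringUtils.simpleHostname("node1.example.com"));         // "node1"
         System.out.println(StringUtils.humanReadableInt(10 * 1024 * 1024));          // approximate human-readable form
         System.out.println(StringUtils.formatPercent(0.5, 1));                       // 0.5 as a percentage, 1 decimal digit
         System.out.println(StringUtils.arrayToString(new String[] {"a", "b", "c"})); // "a,b,c"

         String escaped = StringUtils.escapeString("a,b");  // escapes the comma with the default escape char
         String[] parts = StringUtils.split("a,b,c");        // splits on the default ',' separator
         System.out.println(escaped + " / " + parts.length);

         try {
           throw new IllegalStateException("boom");
         } catch (IllegalStateException e) {
           System.out.println(StringUtils.stringifyException(e)); // exception name plus stack trace
         }
       }
     }
-->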
|
|
<!-- start class org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix -->
|
|
<class name="StringUtils.TraditionalBinaryPrefix" extends="java.lang.Enum<org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix>"
|
|
abstract="false"
|
|
static="true" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
<method name="values" return="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix[]"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="name" type="java.lang.String"/>
|
|
</method>
|
|
<method name="valueOf" return="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="symbol" type="char"/>
|
|
<doc>
|
|
<![CDATA[@return The TraditionalBinaryPrefix object corresponding to the symbol.]]>
|
|
</doc>
|
|
</method>
|
|
<method name="string2long" return="long"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="true" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="s" type="java.lang.String"/>
|
|
<doc>
|
|
<![CDATA[Convert a string to long.
|
|
The input string is first trimmed
|
|
and then it is parsed with traditional binary prefix.
|
|
|
|
For example,
|
|
"-1230k" will be converted to -1230 * 1024 = -1259520;
|
|
"891g" will be converted to 891 * 1024^3 = 956703965184;
|
|
|
|
@param s input string
|
|
@return a long value represented by the input string.]]>
|
|
</doc>
|
|
</method>
|
|
<field name="value" type="long"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<field name="symbol" type="char"
|
|
transient="false" volatile="false"
|
|
static="false" final="true" visibility="public"
|
|
deprecated="not deprecated">
|
|
</field>
|
|
<doc>
|
|
<![CDATA[The traditional binary prefixes, kilo, mega, ..., exa,
|
|
which can be represented by a 64-bit integer.
|
|
TraditionalBinaryPrefix symbols are case insensitive.]]>
|
|
</doc>
|
|
</class>
|
|
<!-- end class org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix -->
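<!-- Usage sketch for StringUtils.TraditionalBinaryPrefix (illustrative only, not JDiff output).
     The expected values repeat the examples given in the string2long() documentation above.

     import org.apache.hadoop.util.StringUtils;

     public class BinaryPrefixExample {
       public static void main(String[] args) {
         long k = StringUtils.TraditionalBinaryPrefix.string2long("-1230k"); // -1230 * 1024 = -1259520
         long g = StringUtils.TraditionalBinaryPrefix.string2long("891g");   // 891 * 1024^3 = 956703965184
         char symbol = StringUtils.TraditionalBinaryPrefix.valueOf('g').symbol; // look up a prefix by symbol
         System.out.println(k + " " + g + " " + symbol);
       }
     }
-->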
|
|
<!-- start interface org.apache.hadoop.util.Tool -->
|
|
<interface name="Tool" abstract="true"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<implements name="org.apache.hadoop.conf.Configurable"/>
|
|
<method name="run" return="int"
|
|
abstract="false" native="false" synchronized="false"
|
|
static="false" final="false" visibility="public"
|
|
deprecated="not deprecated">
|
|
<param name="args" type="java.lang.String[]"/>
|
|
<exception name="Exception" type="java.lang.Exception"/>
|
|
<doc>
|
|
<![CDATA[Execute the command with the given arguments.
|
|
|
|
@param args command specific arguments.
|
|
@return exit code.
|
|
@throws Exception]]>
|
|
</doc>
|
|
</method>
|
|
<doc>
|
|
<![CDATA[A tool interface that supports handling of generic command-line options.
|
|
|
|
<p><code>Tool</code> is the standard for any Map-Reduce tool/application.
|
|
The tool/application should delegate the handling of
|
|
<a href="{@docRoot}/org/apache/hadoop/util/GenericOptionsParser.html#GenericOptions">
|
|
standard command-line options</a> to {@link ToolRunner#run(Tool, String[])}
|
|
and only handle its custom arguments.</p>
|
|
|
|
<p>Here is how a typical <code>Tool</code> is implemented:</p>
|
|
<p><blockquote><pre>
|
|
public class MyApp extends Configured implements Tool {
|
|
|
|
public int run(String[] args) throws Exception {
|
|
// <code>Configuration</code> processed by <code>ToolRunner</code>
|
|
Configuration conf = getConf();
|
|
|
|
// Create a JobConf using the processed <code>conf</code>
|
|
JobConf job = new JobConf(conf, MyApp.class);
|
|
|
|
// Process custom command-line options
|
|
Path in = new Path(args[1]);
|
|
Path out = new Path(args[2]);
|
|
|
|
// Specify various job-specific parameters
|
|
job.setJobName("my-app");
|
|
job.setInputPath(in);
|
|
job.setOutputPath(out);
|
|
job.setMapperClass(MyApp.MyMapper.class);
|
|
job.setReducerClass(MyApp.MyReducer.class);
|
|
|
|
// Submit the job, then poll for progress until the job is complete
|
|
JobClient.runJob(job);
return 0;
|
|
}
|
|
|
|
public static void main(String[] args) throws Exception {
|
|
// Let <code>ToolRunner</code> handle generic command-line options
|
|
int res = ToolRunner.run(new Configuration(), new MyApp(), args);
|
|
|
|
System.exit(res);
|
|
}
|
|
}
|
|
</pre></blockquote></p>
|
|
|
|
@see GenericOptionsParser
|
|
@see ToolRunner]]>
|
|
</doc>
|
|
</interface>
|
|
<!-- end interface org.apache.hadoop.util.Tool -->
<!-- start class org.apache.hadoop.util.ToolRunner -->
<class name="ToolRunner" extends="java.lang.Object"
abstract="false"
static="false" final="false" visibility="public"
deprecated="not deprecated">
<constructor name="ToolRunner"
static="false" final="false" visibility="public"
deprecated="not deprecated">
</constructor>
<method name="run" return="int"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="conf" type="org.apache.hadoop.conf.Configuration"/>
<param name="tool" type="org.apache.hadoop.util.Tool"/>
<param name="args" type="java.lang.String[]"/>
<exception name="Exception" type="java.lang.Exception"/>
<doc>
<![CDATA[Runs the given <code>Tool</code> by {@link Tool#run(String[])}, after
parsing with the given generic arguments. Uses the given
<code>Configuration</code>, or builds one if null.

Sets the <code>Tool</code>'s configuration with the possibly modified
version of the <code>conf</code>.

@param conf <code>Configuration</code> for the <code>Tool</code>.
@param tool <code>Tool</code> to run.
@param args command-line arguments to the tool.
@return exit code of the {@link Tool#run(String[])} method.]]>
</doc>
</method>
<method name="run" return="int"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="tool" type="org.apache.hadoop.util.Tool"/>
<param name="args" type="java.lang.String[]"/>
<exception name="Exception" type="java.lang.Exception"/>
<doc>
<![CDATA[Runs the <code>Tool</code> with its <code>Configuration</code>.

Equivalent to <code>run(tool.getConf(), tool, args)</code>.

@param tool <code>Tool</code> to run.
@param args command-line arguments to the tool.
@return exit code of the {@link Tool#run(String[])} method.]]>
</doc>
</method>
<method name="printGenericCommandUsage"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="out" type="java.io.PrintStream"/>
<doc>
<![CDATA[Prints generic command-line arguments and usage information.

@param out stream to write usage information to.]]>
</doc>
</method>
<doc>
<![CDATA[A utility to help run {@link Tool}s.

<p><code>ToolRunner</code> can be used to run classes implementing the
<code>Tool</code> interface. It works in conjunction with
{@link GenericOptionsParser} to parse the
<a href="{@docRoot}/org/apache/hadoop/util/GenericOptionsParser.html#GenericOptions">
generic hadoop command line arguments</a> and modify the
<code>Configuration</code> of the <code>Tool</code>. The
application-specific options are passed along without being modified.
</p>

@see Tool
@see GenericOptionsParser]]>
</doc>
</class>
<!-- end class org.apache.hadoop.util.ToolRunner -->
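<!-- Illustrative sketch (not produced by JDiff): what ToolRunner adds on top of calling
     Tool.run() directly. The tool class MyTool and the option values are hypothetical.

       MyTool tool = new MyTool();   // implements Tool, typically extends Configured

       // Generic options such as "-D mapred.reduce.tasks=4" or "-conf my-site.xml" are
       // stripped by GenericOptionsParser and applied to the Configuration; only the
       // remaining arguments reach MyTool.run(String[]).
       int exitCode = ToolRunner.run(new Configuration(), tool, args);

       // Convenience form that reuses the tool's own configuration:
       // ToolRunner.run(tool, args);

       // On a usage error, a tool can print the shared help text for the generic options:
       ToolRunner.printGenericCommandUsage(System.err);

       System.exit(exitCode);
-->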
<!-- start class org.apache.hadoop.util.UTF8ByteArrayUtils -->
<class name="UTF8ByteArrayUtils" extends="java.lang.Object"
abstract="false"
static="false" final="false" visibility="public"
deprecated="not deprecated">
<constructor name="UTF8ByteArrayUtils"
static="false" final="false" visibility="public"
deprecated="not deprecated">
</constructor>
<method name="findByte" return="int"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="utf" type="byte[]"/>
<param name="start" type="int"/>
<param name="end" type="int"/>
<param name="b" type="byte"/>
<doc>
<![CDATA[Find the first occurrence of the given byte b in a UTF-8 encoded string
@param utf a byte array containing a UTF-8 encoded string
@param start starting offset
@param end ending position
@param b the byte to find
@return position of the first occurrence of the byte; otherwise -1]]>
</doc>
</method>
<method name="findBytes" return="int"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="utf" type="byte[]"/>
<param name="start" type="int"/>
<param name="end" type="int"/>
<param name="b" type="byte[]"/>
<doc>
<![CDATA[Find the first occurrence of the given byte sequence b in a UTF-8 encoded string
@param utf a byte array containing a UTF-8 encoded string
@param start starting offset
@param end ending position
@param b the bytes to find
@return position of the first occurrence of the byte sequence; otherwise -1]]>
</doc>
</method>
<method name="findNthByte" return="int"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="utf" type="byte[]"/>
<param name="start" type="int"/>
<param name="length" type="int"/>
<param name="b" type="byte"/>
<param name="n" type="int"/>
<doc>
<![CDATA[Find the nth occurrence of the given byte b in a UTF-8 encoded string
@param utf a byte array containing a UTF-8 encoded string
@param start starting offset
@param length the length of the byte array
@param b the byte to find
@param n the desired occurrence of the given byte
@return position of the nth occurrence of the given byte if it exists; otherwise -1]]>
</doc>
</method>
<method name="findNthByte" return="int"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="utf" type="byte[]"/>
<param name="b" type="byte"/>
<param name="n" type="int"/>
<doc>
<![CDATA[Find the nth occurrence of the given byte b in a UTF-8 encoded string
@param utf a byte array containing a UTF-8 encoded string
@param b the byte to find
@param n the desired occurrence of the given byte
@return position of the nth occurrence of the given byte if it exists; otherwise -1]]>
</doc>
</method>
</class>
<!-- end class org.apache.hadoop.util.UTF8ByteArrayUtils -->
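<!-- Illustrative sketch (not produced by JDiff): using the finder methods above to locate
     separator bytes in a UTF-8 encoded record, e.g. when splitting a tab-delimited line
     into fields. The sample data is hypothetical; getBytes("UTF-8") may throw a checked
     UnsupportedEncodingException, omitted here for brevity.

       byte[] record = "key\tfield1\tfield2".getBytes("UTF-8");

       // Position of the first tab within [0, record.length), or -1 if none: 3 here.
       int firstTab = UTF8ByteArrayUtils.findByte(record, 0, record.length, (byte) '\t');

       // Position of the second tab, or -1 if fewer than two exist: 10 here.
       int secondTab = UTF8ByteArrayUtils.findNthByte(record, (byte) '\t', 2);
-->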
<!-- start class org.apache.hadoop.util.VersionInfo -->
<class name="VersionInfo" extends="java.lang.Object"
abstract="false"
static="false" final="false" visibility="public"
deprecated="not deprecated">
<constructor name="VersionInfo"
static="false" final="false" visibility="public"
deprecated="not deprecated">
</constructor>
<method name="getVersion" return="java.lang.String"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<doc>
<![CDATA[Get the Hadoop version.
@return the Hadoop version string, e.g. "0.6.3-dev"]]>
</doc>
</method>
<method name="getRevision" return="java.lang.String"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<doc>
<![CDATA[Get the subversion revision number for the root directory.
@return the revision number, e.g. "451451"]]>
</doc>
</method>
<method name="getDate" return="java.lang.String"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<doc>
<![CDATA[The date that Hadoop was compiled.
@return the compilation date in unix date format]]>
</doc>
</method>
<method name="getUser" return="java.lang.String"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<doc>
<![CDATA[The user that compiled Hadoop.
@return the username of the user]]>
</doc>
</method>
<method name="getUrl" return="java.lang.String"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<doc>
<![CDATA[Get the subversion URL for the root Hadoop directory.]]>
</doc>
</method>
<method name="getBuildVersion" return="java.lang.String"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<doc>
<![CDATA[Returns the buildVersion which includes version,
revision, user and date.]]>
</doc>
</method>
<method name="main"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="args" type="java.lang.String[]"/>
</method>
<doc>
<![CDATA[This class finds the package info for Hadoop and the HadoopVersionAnnotation
information.]]>
</doc>
</class>
<!-- end class org.apache.hadoop.util.VersionInfo -->
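<!-- Illustrative sketch (not produced by JDiff): reading build metadata at runtime with
     the accessors above. The printed values depend on the build; the strings in the
     comments reuse the examples from the method docs.

       System.out.println("Hadoop " + VersionInfo.getVersion());      // e.g. "0.6.3-dev"
       System.out.println("Revision " + VersionInfo.getRevision());   // e.g. "451451"
       System.out.println(VersionInfo.getBuildVersion());              // version, revision, user and date

     The main() entry point prints similar information, which is how version details are
     typically exposed from the command line.
-->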
<!-- start class org.apache.hadoop.util.XMLUtils -->
<class name="XMLUtils" extends="java.lang.Object"
abstract="false"
static="false" final="false" visibility="public"
deprecated="not deprecated">
<constructor name="XMLUtils"
static="false" final="false" visibility="public"
deprecated="not deprecated">
</constructor>
<method name="transform"
abstract="false" native="false" synchronized="false"
static="true" final="false" visibility="public"
deprecated="not deprecated">
<param name="styleSheet" type="java.io.InputStream"/>
<param name="xml" type="java.io.InputStream"/>
<param name="out" type="java.io.Writer"/>
<exception name="TransformerConfigurationException" type="javax.xml.transform.TransformerConfigurationException"/>
<exception name="TransformerException" type="javax.xml.transform.TransformerException"/>
<doc>
<![CDATA[Transform input xml given a stylesheet.

@param styleSheet the style-sheet
@param xml input xml data
@param out output
@throws TransformerConfigurationException
@throws TransformerException]]>
</doc>
</method>
<doc>
<![CDATA[General xml utilities.]]>
</doc>
</class>
<!-- end class org.apache.hadoop.util.XMLUtils -->
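<!-- Illustrative sketch (not produced by JDiff): applying an XSL stylesheet to an XML
     document with the transform method above. The file names are hypothetical, and the
     checked IOException handling is omitted for brevity.

       InputStream styleSheet = new FileInputStream("report.xsl");
       InputStream xml = new FileInputStream("data.xml");
       Writer out = new PrintWriter(System.out);

       // Throws TransformerConfigurationException if the stylesheet is invalid,
       // TransformerException if the transformation itself fails.
       XMLUtils.transform(styleSheet, xml, out);
       out.flush();
-->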
</package>

</api>