Commit 55a576906d
1. The class WrappedIO has been extended with more filesystem operations:
   - openFile()
   - PathCapabilities
   - StreamCapabilities
   - ByteBufferPositionedReadable
   All these static methods raise UncheckedIOExceptions rather than checked ones.

2. The adjacent class org.apache.hadoop.io.wrappedio.WrappedStatistics provides
   similar access to the IOStatistics/IOStatisticsContext classes and operations.
   It allows callers to:
   * Get a serializable IOStatisticsSnapshot from an IOStatisticsSource or
     IOStatistics instance.
   * Save an IOStatisticsSnapshot to a file.
   * Convert an IOStatisticsSnapshot to JSON.
   * Given an object which may be an IOStatisticsSource, return an object whose
     toString() value is a dynamically generated, human-readable summary. This is
     for logging.
   * Use separate getters for the different sections of IOStatistics.
   * Retrieve mean values as a Map.Entry<Long, Long> of (samples, sum), from which
     means may be calculated.

   There are examples of the dynamic bindings to these classes in:
     org.apache.hadoop.io.wrappedio.impl.DynamicWrappedIO
     org.apache.hadoop.io.wrappedio.impl.DynamicWrappedStatistics
   These use DynMethods and other classes in the package
   org.apache.hadoop.util.dynamic, which are based on the Apache Parquet
   equivalents. This makes it easier to re-implement the bindings in that library
   and in others which have their own fork of the classes (example: Apache Iceberg).

3. The openFile() option "fs.option.openfile.read.policy" now has specific file
   format policies for the core filetypes:
   * avro
   * columnar
   * csv
   * hbase
   * json
   * orc
   * parquet
   S3A chooses the appropriate sequential/random policy for each of these.
   A policy of `parquet, columnar, vector, random, adaptive` will use the parquet
   policy for any filesystem aware of it, falling back to the first entry in the
   list which the specific version of the filesystem recognizes.

4. The new path capability fs.capability.virtual.block.locations indicates that
   block locations are generated client side and do not refer to real hosts
   (see the sketch below).

Contributed by Steve Loughran
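As an illustration of items 3 and 4 above, here is a minimal sketch using the
public FileSystem openFile() builder and hasPathCapability() probe. The class
name and the sample path are illustrative assumptions; the option key, the
policy list and the capability name are the ones listed in the commit message.

import java.util.concurrent.CompletableFuture;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadPolicyExample {                 // illustrative class name
  public static void main(String[] args) throws Exception {
    // e.g. "s3a://bucket/data/part-0000.parquet" (hypothetical path)
    Path path = new Path(args[0]);
    FileSystem fs = path.getFileSystem(new Configuration());

    // Item 4: probe whether block locations are synthetic (client-side generated).
    boolean virtualLocations =
        fs.hasPathCapability(path, "fs.capability.virtual.block.locations");
    System.out.println("virtual block locations: " + virtualLocations);

    // Item 3: ask for the parquet read policy, falling back to older policy
    // names on filesystems which do not recognize the newer ones.
    CompletableFuture<FSDataInputStream> future = fs.openFile(path)
        .opt("fs.option.openfile.read.policy",
            "parquet, columnar, vector, random, adaptive")
        .build();

    try (FSDataInputStream in = future.get()) {
      System.out.println("first byte: " + in.read());
    }
  }
}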
<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->
<FindBugsFilter>
  <Match>
    <Package name="org.apache.hadoop.security.proto" />
  </Match>
  <Match>
    <Package name="org.apache.hadoop.tools.proto" />
  </Match>
  <Match>
    <Bug pattern="EI_EXPOSE_REP" />
  </Match>
  <Match>
    <Bug pattern="EI_EXPOSE_REP2" />
  </Match>
  <Match>
    <Bug pattern="SE_COMPARATOR_SHOULD_BE_SERIALIZABLE" />
  </Match>
  <Match>
    <Class name="~.*_jsp" />
    <Bug pattern="DLS_DEAD_LOCAL_STORE" />
  </Match>
  <Match>
    <Field name="_jspx_dependants" />
    <Bug pattern="UWF_UNWRITTEN_FIELD" />
  </Match>
  <!--
    Inconsistent synchronization for Client.Connection.out is intentional,
    so that a connection can be closed instantly.
  -->
  <Match>
    <Class name="org.apache.hadoop.ipc.Client$Connection" />
    <Field name="ipcStreams" />
    <Bug pattern="IS2_INCONSISTENT_SYNC" />
  </Match>
  <!--
    The nativeCoder field is get/set and used by native code.
  -->
  <Match>
    <Class name="org.apache.hadoop.io.erasurecode.rawcoder.AbstractNativeRawEncoder" />
    <Field name="nativeCoder" />
    <Bug pattern="UUF_UNUSED_FIELD" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.io.erasurecode.rawcoder.AbstractNativeRawDecoder" />
    <Field name="nativeCoder" />
    <Bug pattern="UUF_UNUSED_FIELD" />
  </Match>
  <!--
    A further SaslException should be ignored during cleanup and the
    original exception should be re-thrown.
  -->
  <Match>
    <Class name="org.apache.hadoop.security.SaslRpcClient" />
    <Bug pattern="DE_MIGHT_IGNORE" />
  </Match>
  <!--
    Ignore Cross Scripting Vulnerabilities
  -->
  <Match>
    <Package name="~org.apache.hadoop.mapred.*" />
    <Bug code="XSS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.taskdetails_jsp" />
    <Bug code="HRS" />
  </Match>
  <!--
    Ignore warnings where a child class has the same name as its
    super class. Classes based on the old API shadow names from the
    new API. Should go away after HADOOP-1.0.
  -->
  <Match>
    <Class name="~org.apache.hadoop.mapred.*" />
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
  </Match>
  <!--
    Ignore warnings for usage of System.exit. This is
    required and has been well thought out.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.Child$2" />
    <Method name="run" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.JobTracker" />
    <Method name="addHostToNodeMapping" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.Task" />
    <Or>
      <Method name="done" />
      <Method name="commit" />
      <Method name="statusUpdate" />
    </Or>
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.Task$TaskReporter" />
    <Method name="run" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.ProgramDriver" />
    <Method name="driver" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.RunJar" />
    <Method name="run" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <!--
    We need to cast objects between old and new API objects.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.OutputCommitter" />
    <Bug pattern="BC_UNCONFIRMED_CAST" />
  </Match>
  <!--
    We intentionally get the name from the inner class.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.TaskTracker$MapEventsFetcherThread" />
    <Method name="run" />
    <Bug pattern="IA_AMBIGUOUS_INVOCATION_OF_INHERITED_OR_OUTER_METHOD" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.FileOutputCommitter" />
    <Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
  </Match>
  <!--
    Ignoring this warning, as resolving it would need a non-trivial change in code.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor" />
    <Method name="configure" />
    <Field name="maxNumItems" />
    <Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
  </Match>
  <!--
    Comes from org.apache.jasper.runtime.ResourceInjector. Cannot do much.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.jobqueue_005fdetails_jsp" />
    <Field name="_jspx_resourceInjector" />
    <Bug pattern="SE_BAD_FIELD" />
  </Match>
  <!--
    Storing textInputFormat and then passing it as a parameter. Safe to ignore.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob" />
    <Method name="createValueAggregatorJob" />
    <Bug pattern="DLS_DEAD_STORE_OF_CLASS_LITERAL" />
  </Match>
  <!--
    Can be removed after the upgrade to findbugs 1.3.8.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.lib.db.DBInputFormat" />
    <Method name="getSplits" />
    <Bug pattern="DLS_DEAD_LOCAL_STORE" />
  </Match>
  <!--
    This is a spurious warning. Just ignore it.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.MapTask$MapOutputBuffer" />
    <Field name="kvindex" />
    <Bug pattern="IS2_INCONSISTENT_SYNC" />
  </Match>
  <!--
    core changes
  -->
  <Match>
    <Class name="~org.apache.hadoop.*" />
    <Bug code="MS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.fs.FileSystem" />
    <Method name="checkPath" />
    <Bug pattern="ES_COMPARING_STRINGS_WITH_EQ" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.io.Closeable" />
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.security.AccessControlException" />
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.ProcfsBasedProcessTree" />
    <Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
  </Match>
  <!--
    Streaming, Examples
  -->
  <Match>
    <Class name="org.apache.hadoop.streaming.StreamUtil$TaskId" />
    <Bug pattern="URF_UNREAD_FIELD" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.examples.DBCountPageView" />
    <Method name="verify" />
    <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.examples.ContextFactory" />
    <Method name="setAttributes" />
    <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
  </Match>
  <!--
    TFile
  -->
  <Match>
    <Class name="org.apache.hadoop.io.file.tfile.Chunk$ChunkDecoder" />
    <Method name="close" />
    <Bug pattern="SR_NOT_CHECKED" />
  </Match>
  <!--
    The purpose of skip() is to drain remaining bytes of the chunk-encoded
    stream (one chunk at a time). The termination condition is checked by
    checkEOF().
  -->
  <Match>
    <Class name="org.apache.hadoop.io.file.tfile.Utils" />
    <Method name="writeVLong" />
    <Bug pattern="SF_SWITCH_FALLTHROUGH" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.io.Text" />
    <Method name="bytesToCodePoint" />
    <Bug pattern="SF_SWITCH_NO_DEFAULT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.PureJavaCrc32C" />
    <Method name="update" />
    <Bug pattern="SF_SWITCH_NO_DEFAULT" />
  </Match>
  <!--
    The switch condition fall through is intentional and for performance
    purposes.
  -->
  <Match>
    <Class name="org.apache.hadoop.log.EventCounter"/>
    <!-- backward compatibility -->
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
  </Match>
  <Match>
    <Class name="org.apache.hadoop.metrics.jvm.EventCounter"/>
    <!-- backward compatibility -->
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtobufRpcEngineProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtobufRpcEngine2Protos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtocolInfoProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.IpcConnectionContextProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcHeaderProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ha\.proto\.HAServiceProtocolProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ha\.proto\.ZKFCProtocolProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.security\.proto\.SecurityProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.TestProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.proto\.RefreshCallQueueProtocolProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.proto\.GenericRefreshProtocolProtos.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.tracing\.TraceAdminPB.*"/>
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.fs\.FSProto.*"/>
  </Match>
  <!--
    Manually checked; findbugs misses the child thread manually syncing on
    the parent's intrinsic lock.
  -->
  <Match>
    <Class name="org.apache.hadoop.metrics2.lib.MutableQuantiles" />
    <Field name="previousSnapshot" />
    <Bug pattern="IS2_INCONSISTENT_SYNC" />
  </Match>
  <!--
    The method uses a generic type T that extends two other types
    T1 and T2. Findbugs complains of a cast from T1 to T2.
  -->
  <Match>
    <Class name="org.apache.hadoop.fs.DelegationTokenRenewer" />
    <Method name="removeRenewAction" />
    <Bug pattern="BC_UNCONFIRMED_CAST" />
  </Match>
  <!--
    The switch case for INITIATE is expected to fall through to RESPONSE
    to process the initial SASL response token included in the INITIATE.
  -->
  <Match>
    <Class name="org.apache.hadoop.ipc.Server$Connection" />
    <Method name="processSaslMessage" />
    <Bug pattern="SF_SWITCH_FALLTHROUGH" />
  </Match>
  <!-- WA_NOT_IN_LOOP is invalid in util.concurrent.AsyncGet$Util.wait. -->
  <Match>
    <Class name="org.apache.hadoop.util.concurrent.AsyncGet$Util" />
    <Method name="wait" />
    <Bug pattern="WA_NOT_IN_LOOP" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.service.AbstractService" />
    <Method name="stop" />
    <Bug code="JLM" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.service.AbstractService" />
    <Method name="waitForServiceToStop" />
    <Bug code="JLM" />
  </Match>
  <!-- Code from the Maven source; the null value is checked on the callee side. -->
  <Match>
    <Class name="org.apache.hadoop.util.ComparableVersion$ListItem" />
    <Method name="compareTo" />
    <Bug code="NP" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.HttpExceptionUtils"/>
    <Method name="validateResponse"/>
    <Bug pattern="REC_CATCH_EXCEPTION"/>
  </Match>
  <Match>
    <Class name="org.apache.hadoop.conf.Configuration"/>
    <Method name="loadProperty"/>
    <Bug pattern="NP_NULL_PARAM_DEREF"/>
  </Match>
  <!-- Parameter is checked with Strings.isNullOrEmpty before being accessed. -->
  <Match>
    <Class name="org.apache.hadoop.conf.Configuration"/>
    <Method name="asXmlDocument"/>
    <Bug pattern="NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE"/>
  </Match>
  <Match>
    <Class name="org.apache.hadoop.ipc.ExternalCall"/>
    <Field name="done"/>
    <Bug pattern="JLM_JSR166_UTILCONCURRENT_MONITORENTER"/>
  </Match>
  <Match>
    <Class name="org.apache.hadoop.metrics2.impl.MetricsConfig"/>
    <Method name="toString"/>
    <Bug pattern="DM_DEFAULT_ENCODING"/>
  </Match>
  <!-- We need to make the methods public because PBHelperClient calls them. -->
  <Match>
    <Class name="org.apache.hadoop.crypto.CipherSuite"/>
    <Method name="setUnknownValue"/>
    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
  </Match>
  <Match>
    <Class name="org.apache.hadoop.crypto.CryptoProtocolVersion"/>
    <Method name="setUnknownValue"/>
    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
  </Match>
  <!-- We need to make the method public for testing. -->
  <Match>
    <Class name="org.apache.hadoop.metrics2.lib.DefaultMetricsSystem"/>
    <Method name="setMiniClusterMode"/>
    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
  </Match>
  <!-- Experimental interface. Ignore. -->
  <Match>
    <Class name="org.apache.hadoop.metrics2.lib.DefaultMetricsFactory"/>
    <Method name="setInstance"/>
    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
  </Match>
  <!-- findbugs is complaining that a stream isn't closed. It will be. -->
  <Match>
    <Class name="org.apache.hadoop.util.JsonSerialization"/>
    <Method name="save"/>
    <Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
  </Match>
  <Match>
    <Class name="org.apache.hadoop.ipc.internal.ShadedProtobufHelper" />
    <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
  </Match>
  <!-- Class cast after an isAssignableFrom check. -->
  <Match>
    <Class name="org.apache.hadoop.util.dynamic.DynMethods" />
    <Bug pattern="BC_UNCONFIRMED_CAST" />
  </Match>
</FindBugsFilter>