diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/VectoredReadUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/VectoredReadUtils.java
index 493b8c3a33..fa0440620a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/VectoredReadUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/VectoredReadUtils.java
@@ -294,7 +294,14 @@ public static List<? extends FileRange> validateAndSortRanges(
       final Optional<Long> fileLength) throws EOFException {
 
     requireNonNull(input, "Null input list");
-    checkArgument(!input.isEmpty(), "Empty input list");
+
+    if (input.isEmpty()) {
+      // this may seem a pathological case, but it was valid
+      // before and somehow Spark can call it through parquet.
+      LOG.debug("Empty input list");
+      return input;
+    }
+
     final List<? extends FileRange> sortedRanges;
 
     if (input.size() == 1) {
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/fsdatainputstream.md b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/fsdatainputstream.md
index 6cbb54ea70..db844a94e3 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/fsdatainputstream.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/fsdatainputstream.md
@@ -474,7 +474,6 @@ No empty lists.
 
 ```python
 if ranges = null raise NullPointerException
-if ranges.len() = 0 raise IllegalArgumentException
 if allocate = null raise NullPointerException
 ```
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractVectoredReadTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractVectoredReadTest.java
index d6a1fb1f0b..aa478f3af6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractVectoredReadTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractVectoredReadTest.java
@@ -340,6 +340,17 @@ public void testConsecutiveRanges() throws Exception {
     }
   }
 
+  @Test
+  public void testEmptyRanges() throws Exception {
+    List<FileRange> fileRanges = new ArrayList<>();
+    try (FSDataInputStream in = openVectorFile()) {
+      in.readVectored(fileRanges, allocate);
+      Assertions.assertThat(fileRanges)
+          .describedAs("Empty ranges must stay empty")
+          .isEmpty();
+    }
+  }
+
   /**
    * Test to validate EOF ranges.
    *
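For reviewers trying the change out, here is a minimal caller-side sketch (not part of the patch; the class name and `Path` are illustrative only) of the relaxed contract: passing an empty range list to `readVectored()` is now a no-op rather than an `IllegalArgumentException`, which is the code path Spark can hit via Parquet.

```java
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileRange;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class EmptyVectoredReadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // illustrative path; any readable file on the target filesystem will do
    Path path = new Path(args.length > 0 ? args[0] : "/tmp/example.dat");
    FileSystem fs = path.getFileSystem(conf);
    try (FSDataInputStream in = fs.open(path)) {
      List<FileRange> ranges = new ArrayList<>();
      // With this patch an empty list is silently accepted and no reads
      // are issued; previously validateAndSortRanges() rejected it.
      in.readVectored(ranges, ByteBuffer::allocate);
    }
  }
}
```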

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/impl/TestVectoredReadUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/impl/TestVectoredReadUtils.java
index 2a290058ca..3fd3fe4d1f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/impl/TestVectoredReadUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/impl/TestVectoredReadUtils.java
@@ -702,12 +702,11 @@ private static Stream mockStreamWithReadFully() throws IOException {
   }
 
   /**
-   * Empty ranges cannot be sorted.
+   * Empty ranges are allowed.
    */
   @Test
-  public void testEmptyRangesRaisesIllegalArgument() throws Throwable {
-    intercept(IllegalArgumentException.class,
-        () -> validateAndSortRanges(Collections.emptyList(), Optional.empty()));
+  public void testEmptyRangesAllowed() throws Throwable {
+    validateAndSortRanges(Collections.emptyList(), Optional.empty());
   }
 
   /**
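And the equivalent at the utility level, as exercised by the updated unit test: a short standalone sketch (illustrative only; it assumes the `validateAndSortRanges()` signature visible in the hunk header above) of the new `VectoredReadUtils` behavior.

```java
import java.util.Collections;
import java.util.List;
import java.util.Optional;

import org.apache.hadoop.fs.FileRange;
import org.apache.hadoop.fs.VectoredReadUtils;

public class ValidateEmptyRangesSketch {
  public static void main(String[] args) throws Exception {
    // After this change the empty input is returned unchanged; before it,
    // this call threw IllegalArgumentException("Empty input list").
    List<? extends FileRange> sorted = VectoredReadUtils.validateAndSortRanges(
        Collections.<FileRange>emptyList(), Optional.empty());
    System.out.println("sorted ranges: " + sorted); // prints an empty list
  }
}
```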