From c249db80c25e277629865f2e9fd89496e744e7ef Mon Sep 17 00:00:00 2001
From: Viraj Jasani
Date: Tue, 23 Aug 2022 02:49:04 -0700
Subject: [PATCH] HADOOP-18380. fs.s3a.prefetch.block.size to be read through
 longBytesOption (#4762)

Contributed by Viraj Jasani.
---
 .../java/org/apache/hadoop/fs/s3a/S3AFileSystem.java | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index 769edf01f1..ad94e20e71 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -513,8 +513,13 @@ public void initialize(URI name, Configuration originalConf)
     enableMultiObjectsDelete = conf.getBoolean(ENABLE_MULTI_DELETE, true);
     this.prefetchEnabled = conf.getBoolean(PREFETCH_ENABLED_KEY, PREFETCH_ENABLED_DEFAULT);
-    this.prefetchBlockSize = intOption(
-        conf, PREFETCH_BLOCK_SIZE_KEY, PREFETCH_BLOCK_DEFAULT_SIZE, PREFETCH_BLOCK_DEFAULT_SIZE);
+    long prefetchBlockSizeLong =
+        longBytesOption(conf, PREFETCH_BLOCK_SIZE_KEY, PREFETCH_BLOCK_DEFAULT_SIZE,
+            PREFETCH_BLOCK_DEFAULT_SIZE);
+    if (prefetchBlockSizeLong > (long) Integer.MAX_VALUE) {
+      throw new IOException("S3A prefatch block size exceeds int limit");
+    }
+    this.prefetchBlockSize = (int) prefetchBlockSizeLong;
     this.prefetchBlockCount =
         intOption(conf, PREFETCH_BLOCK_COUNT_KEY, PREFETCH_BLOCK_DEFAULT_COUNT, 1);
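
Usage sketch (not taken from the patch itself): reading the key through longBytesOption means
fs.s3a.prefetch.block.size can be set with a size suffix such as "8M", while values above
Integer.MAX_VALUE now fail fast during initialize() instead of overflowing the int field.
The snippet below assumes the Configuration#getLongBytes parsing that S3AUtils.longBytesOption
delegates to; the class name is illustrative only.

    // Minimal sketch of the suffix parsing behind longBytesOption.
    import org.apache.hadoop.conf.Configuration;

    public class PrefetchBlockSizeExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Suffixed sizes such as "8M" are accepted once the key is read as bytes.
        conf.set("fs.s3a.prefetch.block.size", "8M");
        long bytes = conf.getLongBytes("fs.s3a.prefetch.block.size", 8 * 1024 * 1024);
        System.out.println(bytes);  // prints 8388608
      }
    }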