HADOOP-18380. fs.s3a.prefetch.block.size to be read through longBytesOption (#4762)
Contributed by Viraj Jasani.
commit c249db80c2
parent eda4bb5dcd
@@ -513,8 +513,13 @@ public void initialize(URI name, Configuration originalConf)
     enableMultiObjectsDelete = conf.getBoolean(ENABLE_MULTI_DELETE, true);

     this.prefetchEnabled = conf.getBoolean(PREFETCH_ENABLED_KEY, PREFETCH_ENABLED_DEFAULT);
-    this.prefetchBlockSize = intOption(
-        conf, PREFETCH_BLOCK_SIZE_KEY, PREFETCH_BLOCK_DEFAULT_SIZE, PREFETCH_BLOCK_DEFAULT_SIZE);
+    long prefetchBlockSizeLong =
+        longBytesOption(conf, PREFETCH_BLOCK_SIZE_KEY, PREFETCH_BLOCK_DEFAULT_SIZE,
+            PREFETCH_BLOCK_DEFAULT_SIZE);
+    if (prefetchBlockSizeLong > (long) Integer.MAX_VALUE) {
+      throw new IOException("S3A prefatch block size exceeds int limit");
+    }
+    this.prefetchBlockSize = (int) prefetchBlockSizeLong;
     this.prefetchBlockCount =
         intOption(conf, PREFETCH_BLOCK_COUNT_KEY, PREFETCH_BLOCK_DEFAULT_COUNT, 1);
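For context, a minimal standalone sketch (not the Hadoop source) of what reading the block size through a "long bytes" option buys: Configuration.getLongBytes() accepts unit suffixes such as "128K" or "1M", which the previous int-based path did not, and the parsed long is then range-checked before narrowing to int, mirroring the diff above. The 8 MB default and the class name below are illustrative assumptions, not the actual PREFETCH_BLOCK_DEFAULT_SIZE constant.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;

    public class PrefetchBlockSizeSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration(false);

        // Users can now express the prefetch block size with a unit suffix.
        conf.set("fs.s3a.prefetch.block.size", "1M");

        // Read the value as a long count of bytes, mirroring the patch's use of
        // longBytesOption(); 8 MB stands in for PREFETCH_BLOCK_DEFAULT_SIZE here.
        long blockSizeLong = conf.getLongBytes("fs.s3a.prefetch.block.size", 8 * 1024 * 1024);

        // Reject values that cannot be narrowed to an int, as the patch does.
        if (blockSizeLong > (long) Integer.MAX_VALUE) {
          throw new IOException("S3A prefetch block size exceeds int limit");
        }
        int blockSize = (int) blockSizeLong;
        System.out.println("prefetch block size = " + blockSize + " bytes");
      }
    }

With "1M" set, getLongBytes() resolves to 1048576 bytes; a bare numeric value such as "1048576" continues to work unchanged.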