From 344f324710522ffb27852c1a673c4f7d3d6eac4b Mon Sep 17 00:00:00 2001
From: Andrew Wang
Date: Tue, 3 Jul 2018 11:07:45 +0200
Subject: [PATCH] HDFS-13712. BlockReaderRemote.read() logging improvement.
 Contributed by Gergo Repas.

---
 .../hadoop/hdfs/client/impl/BlockReaderRemote.java | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java
index caf15e41a0..ea1baed45d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java
@@ -129,16 +129,22 @@ public Peer getPeer() {
   @Override
   public synchronized int read(byte[] buf, int off, int len)
       throws IOException {
-    UUID randomId = (LOG.isTraceEnabled() ? UUID.randomUUID() : null);
-    LOG.trace("Starting read #{} file {} from datanode {}",
-        randomId, filename, datanodeID.getHostName());
+    boolean logTraceEnabled = LOG.isTraceEnabled();
+    UUID randomId = null;
+    if (logTraceEnabled) {
+      randomId = UUID.randomUUID();
+      LOG.trace("Starting read #{} file {} from datanode {}",
+          randomId, filename, datanodeID.getHostName());
+    }
 
     if (curDataSlice == null ||
         curDataSlice.remaining() == 0 && bytesNeededToFinish > 0) {
       readNextPacket();
     }
 
-    LOG.trace("Finishing read #{}", randomId);
+    if (logTraceEnabled) {
+      LOG.trace("Finishing read #{}", randomId);
+    }
 
     if (curDataSlice.remaining() == 0) {
       // we're at EOF now
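
The following is a minimal, self-contained sketch (not the actual BlockReaderRemote class) illustrating the pattern this patch applies: evaluate LOG.isTraceEnabled() once, and only allocate the per-read UUID and emit the trace lines when tracing is actually enabled. It assumes an SLF4J logger on the classpath; TraceGuardExample, readInternal(), and currentSource() are hypothetical names used purely for illustration.

import java.util.UUID;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TraceGuardExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(TraceGuardExample.class);

  public synchronized int read(byte[] buf, int off, int len) {
    // Evaluate the trace flag once so the UUID allocation and both trace
    // statements are skipped entirely when TRACE is disabled.
    boolean logTraceEnabled = LOG.isTraceEnabled();
    UUID randomId = null;
    if (logTraceEnabled) {
      randomId = UUID.randomUUID();
      LOG.trace("Starting read #{} from {}", randomId, currentSource());
    }

    // Stand-in for the real packet-reading logic in BlockReaderRemote.
    int nread = readInternal(buf, off, len);

    if (logTraceEnabled) {
      LOG.trace("Finishing read #{}", randomId);
    }
    return nread;
  }

  // Hypothetical helper standing in for datanodeID.getHostName().
  private String currentSource() {
    return "example-datanode";
  }

  // Hypothetical helper; this sketch has no real data source, so it reads nothing.
  private int readInternal(byte[] buf, int off, int len) {
    return 0;
  }
}

The point of the guard is that, when TRACE is off, the method pays only for a single boolean check: the UUID is never generated and the trace calls are never reached, whereas the previous code always invoked LOG.trace (with a null id when tracing was disabled).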