diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java
index caf15e41a0..ea1baed45d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderRemote.java
@@ -129,16 +129,22 @@ public Peer getPeer() {
   @Override
   public synchronized int read(byte[] buf, int off, int len)
       throws IOException {
-    UUID randomId = (LOG.isTraceEnabled() ? UUID.randomUUID() : null);
-    LOG.trace("Starting read #{} file {} from datanode {}",
-        randomId, filename, datanodeID.getHostName());
+    boolean logTraceEnabled = LOG.isTraceEnabled();
+    UUID randomId = null;
+    if (logTraceEnabled) {
+      randomId = UUID.randomUUID();
+      LOG.trace("Starting read #{} file {} from datanode {}",
+          randomId, filename, datanodeID.getHostName());
+    }
 
     if (curDataSlice == null ||
         curDataSlice.remaining() == 0 && bytesNeededToFinish > 0) {
       readNextPacket();
     }
 
-    LOG.trace("Finishing read #{}", randomId);
+    if (logTraceEnabled) {
+      LOG.trace("Finishing read #{}", randomId);
+    }
 
     if (curDataSlice.remaining() == 0) {
       // we're at EOF now
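
The patch checks `LOG.isTraceEnabled()` once and reuses the result, so the per-read `UUID.randomUUID()` allocation and both trace calls are skipped entirely when TRACE logging is off. Below is a minimal standalone sketch of that guard pattern using SLF4J; the `TracedReader` class and its `doRead` helper are hypothetical stand-ins and not part of BlockReaderRemote, only the logging calls mirror the patch.

```java
import java.util.UUID;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical example class (not from Hadoop): checks isTraceEnabled() once
// and reuses the flag, so the UUID allocation and trace calls are skipped
// when TRACE is disabled.
public class TracedReader {
  private static final Logger LOG = LoggerFactory.getLogger(TracedReader.class);

  public int read(byte[] buf, int off, int len) {
    boolean traceEnabled = LOG.isTraceEnabled();
    UUID readId = null;
    if (traceEnabled) {
      readId = UUID.randomUUID();           // only pay for the UUID under TRACE
      LOG.trace("Starting read #{} len {}", readId, len);
    }

    int bytesRead = doRead(buf, off, len);  // hypothetical helper doing the actual work

    if (traceEnabled) {
      LOG.trace("Finishing read #{}, {} bytes", readId, bytesRead);
    }
    return bytesRead;
  }

  private int doRead(byte[] buf, int off, int len) {
    return Math.min(buf.length - off, len); // placeholder; no real I/O here
  }
}
```

Parameterized `LOG.trace(...)` already avoids string concatenation when TRACE is off, but it still evaluates its arguments; hoisting the `isTraceEnabled()` check also avoids the UUID allocation on the hot read path.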