HDFS-9596. Remove Shuffle Method From DFSUtil. Contributed by BELUGA BEHR.
commit 1de25d134f
parent 235e3da90a
@@ -56,7 +56,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -116,22 +115,6 @@ public static SecureRandom getSecureRandom() {
     return SECURE_RANDOM.get();
   }
 
-  /** Shuffle the elements in the given array. */
-  public static <T> T[] shuffle(final T[] array) {
-    if (array != null && array.length > 0) {
-      for (int n = array.length; n > 1; ) {
-        final int randomIndex = ThreadLocalRandom.current().nextInt(n);
-        n--;
-        if (n != randomIndex) {
-          final T tmp = array[randomIndex];
-          array[randomIndex] = array[n];
-          array[n] = tmp;
-        }
-      }
-    }
-    return array;
-  }
-
   /**
    * Comparator for sorting DataNodeInfo[] based on
    * decommissioned and entering_maintenance states.
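For reference, the deleted helper is an in-place Fisher-Yates shuffle over a reference-type array. The JDK replacement is behaviorally equivalent: `Collections.shuffle` performs the same backwards-swapping shuffle, and `Arrays.asList` returns a fixed-size `List` view backed directly by the array, so shuffling the view reorders the array itself. A minimal sketch of the equivalence (class and variable names are illustrative, not from the commit):

```java
import java.util.Arrays;
import java.util.Collections;

public class ShuffleViewDemo {
  public static void main(String[] args) {
    // A reference-type array, like the T[] the removed helper accepted.
    Integer[] data = {1, 2, 3, 4, 5};

    // Arrays.asList returns a fixed-size List<Integer> view backed by
    // 'data', so Collections.shuffle permutes the array in place.
    Collections.shuffle(Arrays.asList(data));

    System.out.println(Arrays.toString(data)); // e.g. [3, 1, 5, 2, 4]
  }
}
```

The one caveat is that this only works for arrays of reference types: passing an `int[]` to `Arrays.asList` yields a single-element `List<int[]>`, and nothing gets shuffled.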
@@ -29,7 +29,9 @@
 import java.io.OutputStream;
 import java.net.Socket;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -50,7 +52,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
@@ -1120,7 +1121,8 @@ public List<DatanodeStorageReport> init() throws IOException {
     final List<DatanodeStorageReport> trimmed = new ArrayList<DatanodeStorageReport>();
     // create network topology and classify utilization collections:
     // over-utilized, above-average, below-average and under-utilized.
-    for (DatanodeStorageReport r : DFSUtil.shuffle(reports)) {
+    Collections.shuffle(Arrays.asList(reports));
+    for (DatanodeStorageReport r : reports) {
       final DatanodeInfo datanode = r.getDatanodeInfo();
       if (shouldIgnore(datanode)) {
         continue;
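The one-line call becomes two lines here because `DFSUtil.shuffle` returned the array, allowing it to sit inline in the for-each header, while `Collections.shuffle` returns void. A related difference: the removed helper always drew from `ThreadLocalRandom.current()` with no way to inject a seed, whereas `Collections.shuffle` has an overload that accepts an explicit `java.util.Random`, which is useful when a shuffled order must be reproducible. A minimal sketch with illustrative names:

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.Random;

public class SeededShuffleDemo {
  public static void main(String[] args) {
    String[] nodes = {"dn1", "dn2", "dn3", "dn4"};

    // Overload taking an explicit Random: the same seed always yields
    // the same permutation, handy for deterministic tests.
    Collections.shuffle(Arrays.asList(nodes), new Random(42L));

    System.out.println(Arrays.toString(nodes));
  }
}
```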
@@ -22,7 +22,6 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -174,7 +173,7 @@ private void shuffle(DatanodeInfo[] locs, String[] storageIDs) {
     for (int i = 0; i < length; i++) {
       pairs[i] = new Object[]{locs[i], storageIDs[i]};
     }
-    DFSUtil.shuffle(pairs);
+    Collections.shuffle(Arrays.asList(pairs));
     for (int i = 0; i < length; i++) {
       locs[i] = (DatanodeInfo) pairs[i][0];
       storageIDs[i] = (String) pairs[i][1];
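This test helper shuffles two parallel arrays in lockstep: each `(locs[i], storageIDs[i])` pair is zipped into one `Object[]`, the pairs are shuffled as units, and the arrays are rebuilt, so entry i of each array still refers to the same datanode afterwards. A self-contained sketch of the same pattern, using plain strings in place of `DatanodeInfo`:

```java
import java.util.Arrays;
import java.util.Collections;

public class ParallelArrayShuffleDemo {
  public static void main(String[] args) {
    String[] locs = {"dnA", "dnB", "dnC"};
    String[] storageIDs = {"s1", "s2", "s3"};

    // Zip the parallel arrays into pairs so one shuffle moves both.
    Object[][] pairs = new Object[locs.length][];
    for (int i = 0; i < locs.length; i++) {
      pairs[i] = new Object[]{locs[i], storageIDs[i]};
    }

    // Shuffle the pairs; the list view is backed by 'pairs'.
    Collections.shuffle(Arrays.asList(pairs));

    // Unzip: locs[i] and storageIDs[i] still correspond afterwards.
    for (int i = 0; i < locs.length; i++) {
      locs[i] = (String) pairs[i][0];
      storageIDs[i] = (String) pairs[i][1];
    }

    System.out.println(Arrays.toString(locs));
    System.out.println(Arrays.toString(storageIDs));
  }
}
```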