YARN-7423. Improve service client loggings. Contributed by Jian He
parent a6c4bd74b6
commit a127f7b0fb
@@ -39,7 +39,7 @@
 import org.apache.hadoop.yarn.service.monitor.ServiceMonitor;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
 import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -112,7 +112,7 @@ protected void serviceInit(Configuration conf) throws Exception {
   }

   protected ContainerId getAMContainerId() throws BadClusterStateException {
-    return ContainerId.fromString(SliderUtils.mandatoryEnvVariable(
+    return ContainerId.fromString(ServiceUtils.mandatoryEnvVariable(
         ApplicationConstants.Environment.CONTAINER_ID.name()));
   }

@@ -24,7 +24,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;

 import java.io.Serializable;
 import java.util.ArrayList;
@@ -201,9 +201,9 @@ private String toIndentedString(java.lang.Object o) {
    * this ConfigFile.
    */
   public synchronized void mergeFrom(Configuration that) {
-    SliderUtils.mergeMapsIgnoreDuplicateKeys(this.properties, that
+    ServiceUtils.mergeMapsIgnoreDuplicateKeys(this.properties, that
         .getProperties());
-    SliderUtils.mergeMapsIgnoreDuplicateKeys(this.env, that.getEnv());
+    ServiceUtils.mergeMapsIgnoreDuplicateKeys(this.env, that.getEnv());

     Map<String, ConfigFile> thatMap = new HashMap<>();
     for (ConfigFile file : that.getFiles()) {
@@ -212,7 +212,7 @@ public synchronized void mergeFrom(Configuration that) {
     for (ConfigFile thisFile : files) {
       if(thatMap.containsKey(thisFile.getDestFile())) {
         ConfigFile thatFile = thatMap.get(thisFile.getDestFile());
-        SliderUtils.mergeMapsIgnoreDuplicateKeys(thisFile.getProperties(),
+        ServiceUtils.mergeMapsIgnoreDuplicateKeys(thisFile.getProperties(),
             thatFile.getProperties());
         thatMap.remove(thisFile.getDestFile());
       }
@@ -69,7 +69,7 @@
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
 import org.apache.hadoop.yarn.service.utils.ServiceRegistryUtils;
 import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.apache.hadoop.yarn.service.utils.ZookeeperUtils;
 import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.yarn.util.Times;
@@ -80,19 +80,13 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.text.MessageFormat;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;

 import static org.apache.hadoop.yarn.api.records.YarnApplicationState.*;
 import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.*;
 import static org.apache.hadoop.yarn.service.utils.ServiceApiUtil.jsonSerDeser;
-import static org.apache.hadoop.yarn.service.utils.SliderUtils.*;
+import static org.apache.hadoop.yarn.service.utils.ServiceUtils.*;

 @InterfaceAudience.Public
 @InterfaceStability.Unstable
@@ -169,7 +163,7 @@ public Service loadAppJsonFromLocalFS(String fileName, String serviceName,
           fileName);
     }
     Path filePath = new Path(file.getAbsolutePath());
-    LOG.info("Loading service definition from: " + filePath);
+    LOG.info("Loading service definition from local FS: " + filePath);
     Service service = jsonSerDeser
         .load(FileSystem.getLocal(getConfig()), filePath);
     if (!StringUtils.isEmpty(serviceName)) {
@@ -482,7 +476,7 @@ private synchronized CuratorFramework getCuratorClient()
         getConfig().get(RegistryConstants.KEY_REGISTRY_ZK_QUORUM);

     // though if neither is set: trouble
-    if (SliderUtils.isUnset(registryQuorum)) {
+    if (ServiceUtils.isUnset(registryQuorum)) {
       throw new BadConfigException(
           "No Zookeeper quorum provided in the" + " configuration property "
               + RegistryConstants.KEY_REGISTRY_ZK_QUORUM);
@@ -505,7 +499,7 @@ private void verifyNoLiveAppInRM(String serviceName, String action)
     types.add(YarnServiceConstants.APP_TYPE);
     Set<String> tags = null;
     if (serviceName != null) {
-      tags = Collections.singleton(SliderUtils.createNameTag(serviceName));
+      tags = Collections.singleton(ServiceUtils.createNameTag(serviceName));
     }
     GetApplicationsRequest request = GetApplicationsRequest.newInstance();
     request.setApplicationTypes(types);
@@ -652,7 +646,7 @@ private String buildCommandLine(String serviceName, Configuration conf,
     // write out the path output
     CLI.addOutAndErrFiles(STDOUT_AM, STDERR_AM);
     String cmdStr = CLI.build();
-    LOG.info("AM launch command: {}", cmdStr);
+    LOG.debug("AM launch command: {}", cmdStr);
     return cmdStr;
   }

@@ -671,12 +665,12 @@ private Map<String, String> addAMEnv() throws IOException {
     }
     if (!UserGroupInformation.isSecurityEnabled()) {
       String userName = UserGroupInformation.getCurrentUser().getUserName();
-      LOG.info("Run as user " + userName);
+      LOG.debug("Run as user " + userName);
       // HADOOP_USER_NAME env is used by UserGroupInformation when log in
       // This env makes AM run as this user
       env.put("HADOOP_USER_NAME", userName);
     }
-    LOG.info("AM env: \n{}", stringifyMap(env));
+    LOG.debug("AM env: \n{}", stringifyMap(env));
     return env;
   }

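Together with the buildCommandLine hunk above, this demotes the AM launch command, the run-as user, and the AM environment dump to debug level, so they no longer print on every client submission. A rough sketch of the tail of addAMEnv after the change; the env map, LOG, and the stringifyMap helper are existing ServiceClient members assumed from the surrounding context, not defined here:

    // Sketch only: env, LOG and stringifyMap come from the enclosing class.
    if (!UserGroupInformation.isSecurityEnabled()) {
      String userName = UserGroupInformation.getCurrentUser().getUserName();
      LOG.debug("Run as user " + userName);            // was LOG.info
      // HADOOP_USER_NAME is read by UserGroupInformation at login time,
      // which makes the AM run as this user.
      env.put("HADOOP_USER_NAME", userName);
    }
    LOG.debug("AM env: \n{}", stringifyMap(env));      // was LOG.info
    return env;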
@@ -689,11 +683,14 @@ protected Path addJarResource(String serviceName,
         libPath, "lib", false);
     Path dependencyLibTarGzip = fs.getDependencyTarGzip();
     if (fs.isFile(dependencyLibTarGzip)) {
-      LOG.info("Loading lib tar from " + fs.getFileSystem().getScheme() + ":/"
+      LOG.debug("Loading lib tar from " + fs.getFileSystem().getScheme() + ":/"
           + dependencyLibTarGzip);
-      SliderUtils.putAmTarGzipAndUpdate(localResources, fs);
+      fs.submitTarGzipAndUpdate(localResources);
     } else {
-      String[] libs = SliderUtils.getLibDirs();
+      String[] libs = ServiceUtils.getLibDirs();
+      LOG.info("Uploading all dependency jars to HDFS. For faster submission of" +
+          " apps, pre-upload dependency jars to HDFS "
+          + "using command: yarn app -enableFastLaunch");
       for (String libDirProp : libs) {
         ProviderUtils.addAllDependencyJars(localResources, fs, libPath, "lib",
             libDirProp);
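Condensed, the dependency-localization logic after this hunk has two paths: if the dependency tarball already exists on the cluster filesystem it is localized directly (with only a debug message), otherwise the individual jars are uploaded and the user is told how to pre-upload them. A sketch, with fs, localResources, libPath and LOG assumed from the enclosing ServiceClient method:

    // Sketch of the branch after this hunk; surrounding names assumed.
    Path dependencyLibTarGzip = fs.getDependencyTarGzip();
    if (fs.isFile(dependencyLibTarGzip)) {
      // Pre-uploaded tarball found: localize it directly, log at debug only.
      LOG.debug("Loading lib tar from " + fs.getFileSystem().getScheme() + ":/"
          + dependencyLibTarGzip);
      fs.submitTarGzipAndUpdate(localResources);
    } else {
      // No tarball: upload each lib dir and point the user at fast launch.
      String[] libs = ServiceUtils.getLibDirs();
      LOG.info("Uploading all dependency jars to HDFS. For faster submission of"
          + " apps, pre-upload dependency jars to HDFS "
          + "using command: yarn app -enableFastLaunch");
      for (String libDirProp : libs) {
        ProviderUtils.addAllDependencyJars(localResources, fs, libPath, "lib",
            libDirProp);
      }
    }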
@@ -740,7 +737,8 @@ public int actionStart(String serviceName) throws YarnException, IOException {
     ApplicationId appId = submitApp(service);
     service.setId(appId.toString());
     // write app definition on to hdfs
-    createDirAndPersistApp(appDir, service);
+    Path appJson = persistAppDef(appDir, service);
+    LOG.info("Persisted service " + service.getName() + " at " + appJson);
     return 0;
   }

@@ -763,16 +761,14 @@ private void createDirAndPersistApp(Path appDir, Service service)
       throws IOException, SliderException {
     FsPermission appDirPermission = new FsPermission("750");
     fs.createWithPermissions(appDir, appDirPermission);
-    persistAppDef(appDir, service);
+    Path appJson = persistAppDef(appDir, service);
+    LOG.info("Persisted service " + service.getName() + " at " + appJson);
   }

-  private void persistAppDef(Path appDir, Service service)
-      throws IOException {
+  private Path persistAppDef(Path appDir, Service service) throws IOException {
     Path appJson = new Path(appDir, service.getName() + ".json");
-    jsonSerDeser
-        .save(fs.getFileSystem(), appJson, service, true);
-    LOG.info(
-        "Persisted service " + service.getName() + " at " + appJson);
+    jsonSerDeser.save(fs.getFileSystem(), appJson, service, true);
+    return appJson;
   }

   private void addKeytabResourceIfSecure(SliderFileSystem fileSystem,
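For readability, here is how the two persistence helpers fit together after this hunk: persistAppDef now returns the path it wrote, and each caller (createDirAndPersistApp here, actionStart above) logs where the definition was persisted. Sketch only; fs, LOG and jsonSerDeser are existing ServiceClient members assumed from context:

  private void createDirAndPersistApp(Path appDir, Service service)
      throws IOException, SliderException {
    FsPermission appDirPermission = new FsPermission("750");
    fs.createWithPermissions(appDir, appDirPermission);
    Path appJson = persistAppDef(appDir, service);
    LOG.info("Persisted service " + service.getName() + " at " + appJson);
  }

  // Writes <appDir>/<serviceName>.json and returns the path so callers can log it.
  private Path persistAppDef(Path appDir, Service service) throws IOException {
    Path appJson = new Path(appDir, service.getName() + ".json");
    jsonSerDeser.save(fs.getFileSystem(), appJson, service, true);
    return appJson;
  }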
@@ -922,7 +918,7 @@ public int actionDependency(boolean overwrite)
       return EXIT_SUCCESS;
     }

-    String[] libDirs = SliderUtils.getLibDirs();
+    String[] libDirs = ServiceUtils.getLibDirs();
     if (libDirs.length > 0) {
       File tempLibTarGzipFile = File.createTempFile(
           YarnServiceConstants.DEPENDENCY_TAR_GZ_FILE_NAME + "_",
@@ -40,7 +40,7 @@
 import org.apache.hadoop.yarn.state.StateMachine;
 import org.apache.hadoop.yarn.state.StateMachineFactory;
 import org.apache.hadoop.yarn.util.Apps;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.apache.hadoop.yarn.service.monitor.probe.MonitorUtils;
 import org.apache.hadoop.yarn.service.monitor.probe.Probe;
 import org.slf4j.Logger;
@@ -419,7 +419,7 @@ private void updateMetrics(ContainerStatus status) {

   public boolean areDependenciesReady() {
     List<String> dependencies = componentSpec.getDependencies();
-    if (SliderUtils.isEmpty(dependencies)) {
+    if (ServiceUtils.isEmpty(dependencies)) {
       return true;
     }
     for (String dependency : dependencies) {
@@ -445,7 +445,7 @@ public boolean areDependenciesReady() {
   public Map<String, String> getDependencyHostIpTokens() {
     Map<String, String> tokens = new HashMap<>();
     List<String> dependencies = componentSpec.getDependencies();
-    if (SliderUtils.isEmpty(dependencies)) {
+    if (ServiceUtils.isEmpty(dependencies)) {
       return tokens;
     }
     for (String dependency : dependencies) {
@@ -455,8 +455,8 @@ public Map<String, String> getDependencyHostIpTokens() {
       if (instance.getContainerStatus() == null) {
         continue;
       }
-      if (SliderUtils.isEmpty(instance.getContainerStatus().getIPs()) ||
-          SliderUtils.isUnset(instance.getContainerStatus().getHost())) {
+      if (ServiceUtils.isEmpty(instance.getContainerStatus().getIPs()) ||
+          ServiceUtils.isUnset(instance.getContainerStatus().getHost())) {
         continue;
       }
       String ip = instance.getContainerStatus().getIPs().get(0);
@@ -44,7 +44,7 @@
 import org.apache.hadoop.yarn.state.StateMachine;
 import org.apache.hadoop.yarn.state.StateMachineFactory;
 import org.apache.hadoop.yarn.util.BoundedAppender;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.apache.hadoop.yarn.service.timelineservice.ServiceTimelinePublisher;
 import org.apache.hadoop.yarn.service.monitor.probe.ProbeStatus;
 import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;
@@ -503,7 +503,7 @@ private static class ContainerStatusRetriever implements Runnable {
             + nodeId + ", will try again", e);
         return;
       }
-      if (SliderUtils.isEmpty(status.getIPs()) || SliderUtils
+      if (ServiceUtils.isEmpty(status.getIPs()) || ServiceUtils
           .isUnset(status.getHost())) {
         return;
       }
@@ -28,7 +28,7 @@
 import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
 import org.apache.hadoop.yarn.service.utils.CoreFileSystem;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -132,7 +132,7 @@ public void addCommand(String cmd) {
    */
   public ContainerLaunchContext completeContainerLaunch() throws IOException {

-    String cmdStr = SliderUtils.join(commands, " ", false);
+    String cmdStr = ServiceUtils.join(commands, " ", false);
     log.debug("Completed setting up container command {}", cmdStr);
     containerLaunchContext.setCommands(commands);

@@ -205,7 +205,7 @@ private void dumpLocalResources() {

       String key = entry.getKey();
       LocalResource val = entry.getValue();
-      log.debug(key + "=" + SliderUtils.stringify(val.getResource()));
+      log.debug(key + "=" + ServiceUtils.stringify(val.getResource()));
     }
   }
 }
@@ -22,7 +22,7 @@
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -63,7 +63,7 @@ public String toString() {
   }

   public String buildClasspath() {
-    return SliderUtils.join(pathElements,
+    return ServiceUtils.join(pathElements,
         CLASS_PATH_SEPARATOR,
         false);
   }
@@ -20,7 +20,7 @@

 import com.google.common.base.Preconditions;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;

 import java.util.ArrayList;
 import java.util.List;
@@ -81,6 +81,6 @@ public String toString() {
    * @return the command line
    */
   public String build() {
-    return SliderUtils.join(argumentList, " ");
+    return ServiceUtils.join(argumentList, " ");
   }
 }
@@ -22,7 +22,7 @@
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.apache.hadoop.yarn.service.exceptions.BadConfigException;

 import java.util.Map;
@@ -53,7 +53,7 @@ protected String getJavaBinary() {
    * trimmed.
    */
   public void setJVMHeap(String heap) {
-    if (SliderUtils.isSet(heap)) {
+    if (ServiceUtils.isSet(heap)) {
       add("-Xmx" + heap.trim());
     }
   }
@@ -21,7 +21,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ContainerStatus;
 import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -75,7 +75,7 @@ private static HttpURLConnection getConnection(URL url, int timeout) throws
   public ProbeStatus ping(ComponentInstance instance) {
     ProbeStatus status = new ProbeStatus();
     ContainerStatus containerStatus = instance.getContainerStatus();
-    if (containerStatus == null || SliderUtils.isEmpty(containerStatus.getIPs())
+    if (containerStatus == null || ServiceUtils.isEmpty(containerStatus.getIPs())
         || StringUtils.isEmpty(containerStatus.getHost())) {
       status.fail(this, new IOException("IP is not available yet"));
       return status;
@@ -19,7 +19,7 @@

 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -67,7 +67,7 @@ public static PortProbe create(Map<String, String> props)
   public ProbeStatus ping(ComponentInstance instance) {
     ProbeStatus status = new ProbeStatus();

-    if (instance.getContainerStatus() == null || SliderUtils
+    if (instance.getContainerStatus() == null || ServiceUtils
         .isEmpty(instance.getContainerStatus().getIPs())) {
       status.fail(this, new IOException(
           instance.getCompInstanceName() + ": IP is not available yet"));
@@ -23,7 +23,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.service.api.records.Artifact;
 import org.apache.hadoop.yarn.service.api.records.ConfigFile;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;

 import java.io.IOException;
 import java.nio.file.Paths;
@@ -53,12 +53,12 @@ public AbstractClientProvider() {
   public static final Set<String> createApplicationTags(String appName,
       String appVersion, String appDescription) {
     Set<String> tags = new HashSet<>();
-    tags.add(SliderUtils.createNameTag(appName));
+    tags.add(ServiceUtils.createNameTag(appName));
     if (appVersion != null) {
-      tags.add(SliderUtils.createVersionTag(appVersion));
+      tags.add(ServiceUtils.createVersionTag(appVersion));
     }
     if (appDescription != null) {
-      tags.add(SliderUtils.createDescriptionTag(appDescription));
+      tags.add(ServiceUtils.createDescriptionTag(appDescription));
     }
     return tags;
   }
@@ -25,7 +25,7 @@
 import org.apache.hadoop.yarn.service.api.records.Component;
 import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
 import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.apache.hadoop.yarn.service.exceptions.SliderException;
 import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
 import org.apache.hadoop.yarn.service.containerlaunch.CommandLineBuilder;
@@ -70,7 +70,7 @@ public void buildContainerLaunchContext(AbstractLauncher launcher,
         .initCompTokensForSubstitute(instance);
     tokensForSubstitution.putAll(globalTokens);
     // Set the environment variables in launcher
-    launcher.putEnv(SliderUtils
+    launcher.putEnv(ServiceUtils
         .buildEnvMap(component.getConfiguration(), tokensForSubstitution));
     launcher.setEnv("WORK_DIR", ApplicationConstants.Environment.PWD.$());
     launcher.setEnv("LOG_DIR", ApplicationConstants.LOG_DIR_EXPANSION_VAR);
@@ -41,7 +41,7 @@
 import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;
 import org.apache.hadoop.yarn.service.utils.PublishedConfigurationOutputter;
 import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -94,7 +94,7 @@ public static boolean addProviderJar(
       IOException,
       SliderException {
     try {
-      SliderUtils.putJar(providerResources,
+      ServiceUtils.putJar(providerResources,
           sliderFileSystem,
           providerClass,
           tempPath,
@@ -127,14 +127,14 @@ public static void addAllDependencyJars(
       String libDir,
       String libLocalSrcDir)
       throws IOException, SliderException {
-    if (SliderUtils.isSet(libLocalSrcDir)) {
+    if (ServiceUtils.isSet(libLocalSrcDir)) {
       File file = new File(libLocalSrcDir);
       if (!file.exists() || !file.isDirectory()) {
         throw new BadCommandArgumentsException(
             "Supplied lib src dir %s is not valid", libLocalSrcDir);
       }
     }
-    SliderUtils.putAllJars(providerResources, sliderFileSystem, tempPath,
+    ServiceUtils.putAllJars(providerResources, sliderFileSystem, tempPath,
         libDir, libLocalSrcDir);
   }

@@ -174,7 +174,7 @@ public void localizeServiceKeytabs(AbstractLauncher launcher,
     Configuration conf = service.getConfiguration();
     String keytabPathOnHost =
         conf.getProperty(YarnServiceConf.KEY_AM_KEYTAB_LOCAL_PATH);
-    if (SliderUtils.isUnset(keytabPathOnHost)) {
+    if (ServiceUtils.isUnset(keytabPathOnHost)) {
       String amKeytabName =
           conf.getProperty(YarnServiceConf.KEY_AM_LOGIN_KEYTAB_NAME);
       String keytabDir =
@@ -29,7 +29,7 @@

 import org.apache.hadoop.registry.client.types.ServiceRecord;
 import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceId;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -70,9 +70,9 @@ public YarnRegistryViewForProviders(RegistryOperations registryOperations,
     Preconditions.checkArgument(registryOperations != null,
         "null registry operations");
     Preconditions.checkArgument(user != null, "null user");
-    Preconditions.checkArgument(SliderUtils.isSet(serviceClass),
+    Preconditions.checkArgument(ServiceUtils.isSet(serviceClass),
         "unset service class");
-    Preconditions.checkArgument(SliderUtils.isSet(instanceName),
+    Preconditions.checkArgument(ServiceUtils.isSet(instanceName),
         "instanceName");
     Preconditions.checkArgument(applicationAttemptId != null,
         "null applicationAttemptId");
@@ -409,7 +409,7 @@ public Map<String, LocalResource> submitDirectory(Path srcDir, String destRelati
    */
   public LocalResource submitJarWithClass(Class clazz, Path tempPath, String subdir, String jarName)
       throws IOException, SliderException {
-    File localFile = SliderUtils.findContainingJarOrFail(clazz);
+    File localFile = ServiceUtils.findContainingJarOrFail(clazz);
     return submitFile(localFile, tempPath, subdir, jarName);
   }

@@ -85,7 +85,7 @@ public int getAvailablePort() throws SliderException, IOException {
     if (remainingPortsToCheck != null) {
       return getAvailablePortViaPortArray();
     } else {
-      return SliderUtils.getOpenPort();
+      return ServiceUtils.getOpenPort();
     }
   }

@@ -95,7 +95,7 @@ private int getAvailablePortViaPortArray() throws SliderException {
     Iterator<Integer> portsToCheck = this.remainingPortsToCheck.iterator();
     while (portsToCheck.hasNext() && !found) {
       int portToCheck = portsToCheck.next();
-      found = SliderUtils.isPortAvailable(portToCheck);
+      found = ServiceUtils.isPortAvailable(portToCheck);
       if (found) {
         availablePort = portToCheck;
         portsToCheck.remove();
@@ -369,7 +369,7 @@ private static Map<String, Component> sortByDependencies(List<Component>
         continue;
       }
       boolean dependenciesAlreadySorted = true;
-      if (!SliderUtils.isEmpty(component.getDependencies())) {
+      if (!ServiceUtils.isEmpty(component.getDependencies())) {
         for (String dependency : component.getDependencies()) {
           if (!sortedComponents.containsKey(dependency)) {
             dependenciesAlreadySorted = false;
@@ -60,11 +60,11 @@
 /**
  * These are slider-specific Util methods
  */
-public final class SliderUtils {
+public final class ServiceUtils {

-  private static final Logger log = LoggerFactory.getLogger(SliderUtils.class);
+  private static final Logger log = LoggerFactory.getLogger(ServiceUtils.class);

-  private SliderUtils() {
+  private ServiceUtils() {
   }

   /**
@@ -93,7 +93,7 @@ public static boolean isEmpty(Collection l) {
    * @throws FileNotFoundException if the class did not resolve to a file
    */
   public static File findContainingJarOrFail(Class clazz) throws IOException {
-    File localFile = SliderUtils.findContainingJar(clazz);
+    File localFile = ServiceUtils.findContainingJar(clazz);
     if (null == localFile) {
       throw new FileNotFoundException("Could not find JAR containing " + clazz);
     }
@@ -397,8 +397,8 @@ public static void putAllJars(Map<String, LocalResource> providerResources,
       Path tempPath,
       String libDir,
       String srcPath) throws IOException, SliderException {
-    log.info("Loading all dependencies from {}", srcPath);
-    if (SliderUtils.isSet(srcPath)) {
+    log.debug("Loading all dependencies from {}", srcPath);
+    if (ServiceUtils.isSet(srcPath)) {
       File srcFolder = new File(srcPath);
       FilenameFilter jarFilter = createJarFilter();
       File[] listOfJars = srcFolder.listFiles(jarFilter);
@@ -424,21 +424,6 @@ public boolean accept(File dir, String name) {
     };
   }

-  /**
-   * Submit the AM tar.gz containing all dependencies and map it
-   * @param providerResources provider map to build up
-   * @param sliderFileSystem remote fs
-   */
-  public static void putAmTarGzipAndUpdate(
-      Map<String, LocalResource> providerResources,
-      SliderFileSystem sliderFileSystem
-  ) throws IOException, SliderException {
-    log.info("Loading all dependencies from {}{}",
-        YarnServiceConstants.DEPENDENCY_TAR_GZ_FILE_NAME,
-        YarnServiceConstants.DEPENDENCY_TAR_GZ_FILE_EXT);
-    sliderFileSystem.submitTarGzipAndUpdate(providerResources);
-  }
-
   /**
    * Create a file:// path from a local file
    * @param file file to point the path
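The deleted putAmTarGzipAndUpdate was a thin wrapper around SliderFileSystem; its only caller (the addJarResource hunk in ServiceClient above) now calls the filesystem directly. A minimal sketch of the replacement call, assuming a SliderFileSystem instance named fs and a local-resource map as in that hunk:

    Map<String, LocalResource> localResources = new HashMap<>();
    // Submits the pre-built dependency tar.gz and registers it in the map,
    // which is what the removed helper used to delegate to.
    fs.submitTarGzipAndUpdate(localResources);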
@@ -480,10 +465,6 @@ public static ClasspathConstructor buildClasspath(String sliderConfDir,
     classpath.addLibDir(libdir);
     if (sliderFileSystem.isFile(sliderFileSystem.getDependencyTarGzip())) {
       classpath.addLibDir(YarnServiceConstants.DEPENDENCY_LOCALIZED_DIR_LINK);
-    } else {
-      log.info(
-          "For faster submission of apps, upload dependencies using cmd " +
-              "enableFastLaunch");
     }
     classpath.addRemoteClasspathEnvVar();
     classpath.append(ApplicationConstants.Environment.HADOOP_CONF_DIR.$$());
@@ -32,7 +32,7 @@ public static String buildConnectionString(String zkHosts, int port) {
     String zkPort = Integer.toString(port);
     //parse the hosts
     String[] hostlist = zkHosts.split(",", 0);
-    String quorum = SliderUtils.join(hostlist, ":" + zkPort + ",", false);
+    String quorum = ServiceUtils.join(hostlist, ":" + zkPort + ",", false);
     return quorum;
   }

@@ -117,7 +117,7 @@ public static String buildQuorum(List<HostAndPort> hostAndPorts, int defaultPort
     for (HostAndPort hostAndPort : hostAndPorts) {
       entries.add(buildQuorumEntry(hostAndPort, defaultPort));
     }
-    return SliderUtils.join(entries, ",", false);
+    return ServiceUtils.join(entries, ",", false);
   }

   public static String convertToHostsOnlyList(String quorum) throws

@@ -20,7 +20,6 @@

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
 import org.junit.Assert;
 import org.junit.Test;
