YARN-10028. Integrate the new abstract log servlet to the JobHistory server. Contributed by Adam Antal

Szilard Nemeth 2020-01-14 11:00:08 +01:00
parent 6b86a5110e
commit 13cea0412c
13 changed files with 96 additions and 9 deletions

View File

@@ -79,6 +79,8 @@
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.RPCUtil;
@@ -150,9 +152,11 @@ protected void serviceStart() throws Exception {
}
@VisibleForTesting
-protected void initializeWebApp(Configuration conf) {
+protected void initializeWebApp(Configuration conf) throws IOException {
webApp = new HsWebApp(history);
InetSocketAddress bindAddress = MRWebAppUtil.getJHSWebBindAddress(conf);
+ApplicationClientProtocol appClientProtocol =
+ClientRMProxy.createRMProxy(conf, ApplicationClientProtocol.class);
// NOTE: there should be a .at(InetSocketAddress)
WebApps
.$for("jobhistory", HistoryClientService.class, this, "ws")
@@ -163,6 +167,7 @@ protected void initializeWebApp(Configuration conf) {
JHAdminConfig.MR_WEBAPP_SPNEGO_USER_NAME_KEY)
.withCSRFProtection(JHAdminConfig.MR_HISTORY_CSRF_PREFIX)
.withXFSProtection(JHAdminConfig.MR_HISTORY_XFS_PREFIX)
.withAppClientProtocol(appClientProtocol)
.at(NetUtils.getHostPortString(bindAddress)).start(webApp);
String connectHost = MRWebAppUtil.getJHSWebappURLWithoutScheme(conf).split(":")[0];
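Note: ClientRMProxy.createRMProxy is the standard factory for an RPC client to the ResourceManager; the JobHistory server now creates one so that the log servlet can look up applications that are still running. A minimal sketch of what the proxy offers (illustration only, not part of this commit; the application id is a placeholder):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.ClientRMProxy;

public class RmProxySketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Same call the JobHistory server now makes in initializeWebApp().
    ApplicationClientProtocol rm =
        ClientRMProxy.createRMProxy(conf, ApplicationClientProtocol.class);
    // Placeholder id; real ids are derived from the requested container.
    ApplicationId appId = ApplicationId.newInstance(1578996008000L, 42);
    ApplicationReport report = rm
        .getApplicationReport(GetApplicationReportRequest.newInstance(appId))
        .getApplicationReport();
    System.out.println(report.getYarnApplicationState());
  }
}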

View File

@@ -20,8 +20,10 @@
import java.io.IOException;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@@ -30,9 +32,12 @@
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.JobACL;
@@ -62,7 +67,11 @@
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.webapp.YarnWebServiceParams;
import org.apache.hadoop.yarn.server.webapp.LogServlet;
import org.apache.hadoop.yarn.server.webapp.WebServices;
import org.apache.hadoop.yarn.webapp.BadRequestException;
import org.apache.hadoop.yarn.webapp.NotFoundException;
import org.apache.hadoop.yarn.webapp.WebApp;
@@ -71,19 +80,23 @@
import com.google.inject.Inject;
@Path("/ws/v1/history")
-public class HsWebServices {
+public class HsWebServices extends WebServices {
private final HistoryContext ctx;
private WebApp webapp;
+private final LogServlet logServlet;
private @Context HttpServletResponse response;
-@Context
-UriInfo uriInfo;
+@Context UriInfo uriInfo;
@Inject
-public HsWebServices(final HistoryContext ctx, final Configuration conf,
-final WebApp webapp) {
+public HsWebServices(final HistoryContext ctx,
+final Configuration conf,
+final WebApp webapp,
+@Nullable ApplicationClientProtocol appBaseProto) {
+super(appBaseProto);
this.ctx = ctx;
this.webapp = webapp;
+this.logServlet = new LogServlet(conf, this);
}
private boolean hasAccess(Job job, HttpServletRequest request) {
@@ -409,4 +422,39 @@ public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
return new JobTaskAttemptCounterInfo(ta);
}
@GET
@Path("/containers/{containerid}/logs")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
@InterfaceAudience.Public
@InterfaceStability.Unstable
public Response getLogs(@Context HttpServletRequest hsr,
@PathParam(YarnWebServiceParams.CONTAINER_ID) String containerIdStr,
@QueryParam(YarnWebServiceParams.NM_ID) String nmId,
@QueryParam(YarnWebServiceParams.REDIRECTED_FROM_NODE)
@DefaultValue("false") boolean redirectedFromNode) {
init();
return logServlet.getContainerLogsInfo(hsr, containerIdStr, nmId,
redirectedFromNode, null);
}
@GET
@Path("/containerlogs/{containerid}/{filename}")
@Produces({ MediaType.TEXT_PLAIN + "; " + JettyUtils.UTF_8 })
@InterfaceAudience.Public
@InterfaceStability.Unstable
public Response getLogs(@Context HttpServletRequest req,
@PathParam(YarnWebServiceParams.CONTAINER_ID) String containerIdStr,
@PathParam(YarnWebServiceParams.CONTAINER_LOG_FILE_NAME)
String filename,
@QueryParam(YarnWebServiceParams.RESPONSE_CONTENT_FORMAT)
String format,
@QueryParam(YarnWebServiceParams.RESPONSE_CONTENT_SIZE)
String size,
@QueryParam(YarnWebServiceParams.NM_ID) String nmId,
@QueryParam(YarnWebServiceParams.REDIRECTED_FROM_NODE)
@DefaultValue("false") boolean redirectedFromNode) {
init();
return logServlet.getLogFile(req, containerIdStr, filename, format, size,
nmId, redirectedFromNode, null);
}
}
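Note: with these two methods in place, the JobHistory server answers the same log REST calls as the other YARN daemons. A minimal client sketch (host, container id, and file name are placeholders; 19888 is the default JobHistory web port):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class HsLogsClientSketch {
  private static void fetch(URL url, String accept) throws Exception {
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", accept);
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);
      }
    } finally {
      conn.disconnect();
    }
  }

  public static void main(String[] args) throws Exception {
    String base = "http://jhs.example.com:19888/ws/v1/history";
    String cid = "container_1578996008000_0042_01_000001";
    // Metadata about the aggregated log files of a container (JSON).
    fetch(new URL(base + "/containers/" + cid + "/logs"), "application/json");
    // Contents of one log file of that container (plain text).
    fetch(new URL(base + "/containerlogs/" + cid + "/syslog"), "text/plain");
  }
}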

View File

@@ -28,6 +28,7 @@
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import com.google.inject.util.Providers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -36,6 +37,7 @@
import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
@@ -86,6 +88,7 @@ protected void configureServlets() {
bind(AppContext.class).toInstance(appContext);
bind(HistoryContext.class).toInstance(appContext);
bind(Configuration.class).toInstance(conf);
bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));
serve("/*").with(GuiceContainer.class);
}

View File

@@ -84,7 +84,7 @@ public void setup() throws IOException {
this.ctx = buildHistoryContext(this.conf);
WebApp webApp = mock(HsWebApp.class);
when(webApp.name()).thenReturn("hsmockwebapp");
-this.hsWebServices= new HsWebServices(ctx, conf, webApp);
+this.hsWebServices = new HsWebServices(ctx, conf, webApp, null);
this.hsWebServices.setResponse(mock(HttpServletResponse.class));
Job job = ctx.getAllJobs().values().iterator().next();

View File

@@ -34,6 +34,7 @@
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import com.google.inject.util.Providers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -46,6 +47,7 @@
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
@@ -99,6 +101,7 @@ protected void configureServlets() {
bind(AppContext.class).toInstance(appContext);
bind(HistoryContext.class).toInstance(appContext);
bind(Configuration.class).toInstance(conf);
bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));
serve("/*").with(GuiceContainer.class);
}

View File

@@ -35,6 +35,7 @@
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import com.google.inject.util.Providers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -47,6 +48,7 @@
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
@@ -125,6 +127,7 @@ protected void configureServlets() {
bind(AppContext.class).toInstance(appContext);
bind(HistoryContext.class).toInstance(appContext);
bind(Configuration.class).toInstance(conf);
bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));
serve("/*").with(GuiceContainer.class);
}

View File

@@ -35,6 +35,7 @@
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import com.google.inject.util.Providers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
@@ -44,6 +45,7 @@
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
@@ -96,6 +98,7 @@ protected void configureServlets() {
bind(AppContext.class).toInstance(appContext);
bind(HistoryContext.class).toInstance(appContext);
bind(Configuration.class).toInstance(conf);
bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));
serve("/*").with(GuiceContainer.class);
}
@@ -553,6 +556,7 @@ protected void configureServlets() {
bind(AppContext.class).toInstance(appContext);
bind(HistoryContext.class).toInstance(appContext);
bind(Configuration.class).toInstance(conf);
bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));
serve("/*").with(GuiceContainer.class);
}

View File

@@ -31,6 +31,7 @@
import javax.ws.rs.core.MediaType;
import com.google.inject.util.Providers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -40,6 +41,7 @@
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
@@ -85,6 +87,7 @@ protected void configureServlets() {
bind(AppContext.class).toInstance(appContext);
bind(HistoryContext.class).toInstance(appContext);
bind(Configuration.class).toInstance(conf);
bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));
serve("/*").with(GuiceContainer.class);
}

View File

@@ -33,6 +33,7 @@
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import com.google.inject.util.Providers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.TaskID;
@@ -45,6 +46,7 @@
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
@@ -97,6 +99,7 @@ protected void configureServlets() {
bind(AppContext.class).toInstance(appContext);
bind(HistoryContext.class).toInstance(appContext);
bind(Configuration.class).toInstance(conf);
bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));
serve("/*").with(GuiceContainer.class);
}

View File

@@ -42,6 +42,7 @@
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
import org.apache.hadoop.security.http.XFrameOptionsFilter;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.eclipse.jetty.webapp.WebAppContext;
@@ -104,6 +105,7 @@ static class ServletStruct {
private String xfsConfigPrefix;
private final HashSet<ServletStruct> servlets = new HashSet<ServletStruct>();
private final HashMap<String, Object> attributes = new HashMap<String, Object>();
private ApplicationClientProtocol appClientProtocol;
Builder(String name, Class<T> api, T application, String wsName) {
this.name = name;
@@ -232,6 +234,12 @@ public Builder<T> inDevMode() {
return this;
}
public Builder<T> withAppClientProtocol(
ApplicationClientProtocol appClientProto) {
this.appClientProtocol = appClientProto;
return this;
}
public WebApp build(WebApp webapp) {
if (webapp == null) {
webapp = new WebApp() {
@@ -401,7 +409,6 @@ public void setup() {
webapp.setConf(conf);
webapp.setHttpServer(server);
} catch (ClassNotFoundException e) {
throw new WebAppException("Error starting http server", e);
} catch (IOException e) {
@@ -413,6 +420,9 @@ protected void configure() {
if (api != null) {
bind(api).toInstance(application);
}
if (appClientProtocol != null) {
bind(ApplicationClientProtocol.class).toInstance(appClientProtocol);
}
}
});
LOG.info("Registered webapp guice modules");
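Note: because the builder registers the ApplicationClientProtocol binding only when a protocol was actually supplied, anything injected through this module has to tolerate its absence; web apps that never call withAppClientProtocol simply provide no binding. A hypothetical consumer (names assumed) follows the same pattern as HsWebServices above:

import javax.annotation.Nullable;
import com.google.inject.Inject;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;

public class ExampleWebService {
  // Null when no protocol was supplied and a test bound Providers.of(null).
  private final ApplicationClientProtocol rmClient;

  @Inject
  public ExampleWebService(@Nullable ApplicationClientProtocol rmClient) {
    this.rmClient = rmClient;
  }
}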

View File

@@ -31,7 +31,6 @@
import org.w3c.dom.NodeList;
public class WebServicesTestUtils {
public static long getXmlLong(Element element, String name) {
String val = getXmlString(element, name);
return Long.parseLong(val);

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.webapp;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import javax.servlet.http.HttpServletRequest;
@@ -26,6 +27,7 @@
* for providing various application related information.
*/
@InterfaceAudience.LimitedPrivate({"YARN"})
@InterfaceStability.Unstable
public interface AppInfoProvider {
/**

View File

@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.yarn.server.webapp;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
@@ -24,6 +26,8 @@
* Utility class that wraps application information
* required by the {@link LogServlet} class.
*/
@InterfaceAudience.LimitedPrivate({"YARN"})
@InterfaceStability.Unstable
class BasicAppInfo {
private final YarnApplicationState appState;
private final String user;
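Note: the hunk ends at the fields; a plausible completion of this small wrapper (a sketch only, the rest of the class is outside this diff) is just a constructor plus getters:

BasicAppInfo(YarnApplicationState appState, String user) {
  this.appState = appState;
  this.user = user;
}

YarnApplicationState getAppState() {
  return this.appState;
}

String getUser() {
  return this.user;
}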