From e4ee3d560bddc27a495cc9a158278a9c18276dd0 Mon Sep 17 00:00:00 2001
From: K0K0V0K <109747532+K0K0V0K@users.noreply.github.com>
Date: Tue, 27 Aug 2024 17:55:07 +0200
Subject: [PATCH] YARN-10345 HsWebServices containerlogs does not honor ACLs for completed jobs (#7013)

- following rest apis did not have access control
- - /ws/v1/history/containerlogs/{containerid}/{filename}
- - /ws/v1/history/containers/{containerid}/logs

Change-Id: I434f6138966ab22583d356509e40b70d328d9e7c
---
 .../v2/app/webapp/AMWebServices.java          | 15 ++++--
 .../mapreduce/v2/hs/webapp/HsWebServices.java | 11 ++++-
 .../v2/hs/webapp/TestHsWebServicesAcls.java   | 46 +++++++++++++++----
 3 files changed, 60 insertions(+), 12 deletions(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
index 78174afb6f..e95a5d7d33 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
@@ -40,6 +40,7 @@
 
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptRequestPBImpl;
@@ -113,9 +114,17 @@ private void init() {
     response.setContentType(null);
   }
 
-  /**
-   * convert a job id string to an actual job and handle all the error checking.
-   */
+  public static Job getJobFromContainerIdString(String cid, AppContext appCtx)
+      throws NotFoundException {
+    //example container_e06_1724414851587_0004_01_000001
+    String[] parts = cid.split("_");
+    return getJobFromJobIdString(JobID.JOB + "_" + parts[2] + "_" + parts[3], appCtx);
+  }
+
+
+  /**
+   * convert a job id string to an actual job and handle all the error checking.
+   */
   public static Job getJobFromJobIdString(String jid, AppContext appCtx) throws NotFoundException {
     JobId jobId;
     Job job;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
index a26724b1bb..d16b70ac6f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
@@ -42,6 +42,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -87,6 +88,7 @@ public class HsWebServices extends WebServices {
   private final HistoryContext ctx;
   private WebApp webapp;
   private LogServlet logServlet;
+  private boolean mrAclsEnabled;
 
   private @Context HttpServletResponse response;
   @Context UriInfo uriInfo;
@@ -100,6 +102,7 @@ public HsWebServices(final HistoryContext ctx,
     this.ctx = ctx;
     this.webapp = webapp;
     this.logServlet = new LogServlet(conf, this);
+    this.mrAclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);
   }
 
   private boolean hasAccess(Job job, HttpServletRequest request) {
@@ -116,6 +119,11 @@ private void checkAccess(Job job, HttpServletRequest request) {
       throw new WebApplicationException(Status.UNAUTHORIZED);
     }
   }
+  private void checkAccess(String containerIdStr, HttpServletRequest hsr) {
+    if (mrAclsEnabled) {
+      checkAccess(AMWebServices.getJobFromContainerIdString(containerIdStr, ctx), hsr);
+    }
+  }
 
   private void init() {
     //clear content type
@@ -500,7 +508,7 @@ public Response getContainerLogs(@Context HttpServletRequest hsr,
       @QueryParam(YarnWebServiceParams.MANUAL_REDIRECTION)
       @DefaultValue("false") boolean manualRedirection) {
     init();
-
+    checkAccess(containerIdStr, hsr);
     WrappedLogMetaRequest.Builder logMetaRequestBuilder =
         LogServlet.createRequestFromContainerId(containerIdStr);
 
@@ -527,6 +535,7 @@ public Response getContainerLogFile(@Context HttpServletRequest req,
       @QueryParam(YarnWebServiceParams.MANUAL_REDIRECTION)
       @DefaultValue("false") boolean manualRedirection) {
     init();
+    checkAccess(containerIdStr, req);
     return logServlet.getLogFile(req, containerIdStr, filename, format, size,
         nmId, redirectedFromNode, null, manualRedirection);
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java
index 8d4f635e11..bb25a97c6c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java
@@ -18,23 +18,20 @@
 
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.Response.Status;
 
+import org.junit.Before;
+import org.junit.Test;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
@@ -60,9 +57,19 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.server.webapp.LogServlet;
 import org.apache.hadoop.yarn.webapp.WebApp;
-import org.junit.Before;
-import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 public class TestHsWebServicesAcls {
   private static String FRIENDLY_USER = "friendly";
@@ -253,6 +260,29 @@ public void testGetJobTaskAttemptIdCountersAcls() {
         this.taskIdStr, this.taskAttemptIdStr);
   }
 
+  @Test
+  public void testLogs() {
+    HttpServletRequest hsr = mock(HttpServletRequest.class);
+    when(hsr.getRemoteUser()).thenReturn(ENEMY_USER);
+    hsWebServices.setLogServlet(mock(LogServlet.class));
+    String cid = "container_e02_" + jobIdStr.substring(4) + "_01_000001";
+    try {
+      hsWebServices.getContainerLogFile(hsr, cid, "syslog",
+          null, null, null, false, false);
+      fail("enemy can access job");
+    } catch (WebApplicationException e) {
+      assertEquals(Status.UNAUTHORIZED,
+          Status.fromStatusCode(e.getResponse().getStatus()));
+    }
+
+    when(hsr.getRemoteUser()).thenReturn(FRIENDLY_USER);
+    hsWebServices.getContainerLogFile(hsr, cid, "syslog",
+        "format", "1024", "nmid", false, false);
+    verify(hsWebServices.getLogServlet(), times(1))
+        .getLogFile(any(), anyString(), anyString(),
+            anyString(), anyString(), anyString(), anyBoolean(), eq(null), anyBoolean());
+  }
+
   private static HistoryContext buildHistoryContext(final Configuration conf)
       throws IOException {
     HistoryContext ctx = new MockHistoryContext(1, 1, 1);
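
Reviewer note (not part of the patch): below is a minimal, self-contained sketch of the
container-id to job-id mapping that the new AMWebServices.getJobFromContainerIdString
helper relies on. The ContainerIdToJobIdSketch class and its method are hypothetical and
for illustration only; the real helper resolves the Job through the AppContext via
getJobFromJobIdString, and HsWebServices only performs the check when
MRConfig.MR_ACLS_ENABLED (mapreduce.cluster.acls.enabled) is set to true.

    // Hypothetical, standalone illustration of the ID mapping used by the patch:
    // container_e06_1724414851587_0004_01_000001 -> job_1724414851587_0004.
    // Parts [2] and [3] of the container id are the cluster timestamp and the
    // application sequence number, which together identify the owning job whose
    // ACLs the JobHistoryServer can then check.
    public class ContainerIdToJobIdSketch {

      static String jobIdFromContainerId(String containerIdStr) {
        String[] parts = containerIdStr.split("_");
        // "job" mirrors the JobID.JOB prefix used by the patch
        return "job" + "_" + parts[2] + "_" + parts[3];
      }

      public static void main(String[] args) {
        String cid = "container_e06_1724414851587_0004_01_000001";
        System.out.println(jobIdFromContainerId(cid));   // job_1724414851587_0004
      }
    }

With the job resolved this way, both /ws/v1/history/containerlogs/{containerid}/{filename}
and /ws/v1/history/containers/{containerid}/logs reduce to the same per-job checkAccess
path that the other history REST endpoints already use.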