From 88cbe57c069a1d2dd3bfb32e3ad742566470a10b Mon Sep 17 00:00:00 2001
From: Rohith Sharma K S
Date: Mon, 28 May 2018 12:45:07 +0530
Subject: [PATCH] MAPREDUCE-7097. MapReduce JHS should honor
 yarn.webapp.filter-entity-list-by-user. Contributed by Sunil Govindan.

---
 .../mapreduce/v2/hs/webapp/HsJobBlock.java    | 18 +++++++++++++-
 .../v2/hs/webapp/TestHsJobBlock.java          | 20 ++++++++++++++--
 .../apache/hadoop/yarn/webapp/Controller.java |  4 ++++
 .../org/apache/hadoop/yarn/webapp/View.java   | 24 ++++++++++++-------
 4 files changed, 55 insertions(+), 11 deletions(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
index 18040f0044..9b845cd99c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
@@ -27,6 +27,8 @@
 import java.util.Date;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -39,8 +41,10 @@
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.ResponseInfo;
 import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
@@ -56,9 +60,14 @@
  */
 public class HsJobBlock extends HtmlBlock {
   final AppContext appContext;
+  private UserGroupInformation ugi;
+  private boolean isFilterAppListByUserEnabled;
 
-  @Inject HsJobBlock(AppContext appctx) {
+  @Inject HsJobBlock(Configuration conf, AppContext appctx, ViewContext ctx) {
+    super(ctx);
     appContext = appctx;
+    isFilterAppListByUserEnabled = conf
+        .getBoolean(YarnConfiguration.FILTER_ENTITY_LIST_BY_USER, false);
   }
 
   /*
@@ -78,6 +87,13 @@ public class HsJobBlock extends HtmlBlock {
       html.p().__("Sorry, ", jid, " not found.").__();
       return;
     }
+    ugi = getCallerUGI();
+    if (isFilterAppListByUserEnabled && ugi != null
+        && !j.checkAccess(ugi, JobACL.VIEW_JOB)) {
+      html.p().__("Sorry, ", jid, " could not be viewed for '",
+          ugi.getUserName(), "'.").__();
+      return;
+    }
     if(j instanceof UnparsedJob) {
       final int taskCount = j.getTotalMaps() + j.getTotalReduces();
       UnparsedJob oversizedJob = (UnparsedJob) j;
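The new constructor only reads yarn.webapp.filter-entity-list-by-user (YarnConfiguration.FILTER_ENTITY_LIST_BY_USER, default false); the enforcement itself happens in render(). Below is a minimal sketch of reading that flag, assuming the YARN client jars are on the classpath; the class name FilterFlagCheck is illustrative and not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Illustrative snippet (not part of the patch): reads the same flag that the
// patched HsJobBlock constructor reads, with the same default of false.
public class FilterFlagCheck {
  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    boolean filterByUser =
        conf.getBoolean(YarnConfiguration.FILTER_ENTITY_LIST_BY_USER, false);
    System.out.println("filter-entity-list-by-user = " + filterByUser);
  }
}

When the flag is true, the caller is known, and JobACL.VIEW_JOB is not granted, render() now prints a "could not be viewed" message and returns early instead of rendering the job details.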
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsJobBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsJobBlock.java
index 7fa238e1ce..48e3d3b231 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsJobBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsJobBlock.java
@@ -33,8 +33,10 @@
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.util.StringHelper;
+import org.apache.hadoop.yarn.webapp.Controller;
 import org.apache.hadoop.yarn.webapp.ResponseInfo;
 import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.View.ViewContext;
 import org.apache.hadoop.yarn.webapp.view.BlockForTest;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest;
@@ -49,6 +51,8 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import javax.servlet.http.HttpServletRequest;
+
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -69,7 +73,13 @@ public void testHsJobBlockForOversizeJobShouldDisplayWarningMessage() {
         new JobHistoryStubWithAllOversizeJobs(maxAllowedTaskNum);
     jobHistory.init(config);
 
-    HsJobBlock jobBlock = new HsJobBlock(jobHistory) {
+    Controller.RequestContext rc = mock(Controller.RequestContext.class);
+    ViewContext view = mock(ViewContext.class);
+    HttpServletRequest req = mock(HttpServletRequest.class);
+    when(rc.getRequest()).thenReturn(req);
+    when(view.requestContext()).thenReturn(rc);
+
+    HsJobBlock jobBlock = new HsJobBlock(config, jobHistory, view) {
       // override this so that job block can fetch a job id.
       @Override
       public Map<String, String> moreParams() {
@@ -101,7 +111,13 @@ public void testHsJobBlockForNormalSizeJobShouldNotDisplayWarningMessage() {
     JobHistory jobHistory = new JobHitoryStubWithAllNormalSizeJobs();
     jobHistory.init(config);
 
-    HsJobBlock jobBlock = new HsJobBlock(jobHistory) {
+    Controller.RequestContext rc = mock(Controller.RequestContext.class);
+    ViewContext view = mock(ViewContext.class);
+    HttpServletRequest req = mock(HttpServletRequest.class);
+    when(rc.getRequest()).thenReturn(req);
+    when(view.requestContext()).thenReturn(rc);
+
+    HsJobBlock jobBlock = new HsJobBlock(config, jobHistory, view) {
       // override this so that the job block can fetch a job id.
       @Override
       public Map<String, String> moreParams() {
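Both tests need a ViewContext whose requestContext() can hand back an HttpServletRequest, because the patched HsJobBlock resolves the caller's UGI through that chain. Pulled out on its own, the stubbing looks like the helper below, a minimal sketch assuming Mockito (which the test already uses); the helper class name is hypothetical and not part of the patch.

import javax.servlet.http.HttpServletRequest;

import org.apache.hadoop.yarn.webapp.Controller;
import org.apache.hadoop.yarn.webapp.View.ViewContext;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Hypothetical helper (not part of the patch): builds a ViewContext whose
// requestContext() returns a RequestContext with a usable HttpServletRequest,
// which is all HsJobBlock needs in order to look up the caller's identity.
final class StubbedViewContexts {
  private StubbedViewContexts() {
  }

  static ViewContext newStubbedViewContext() {
    Controller.RequestContext rc = mock(Controller.RequestContext.class);
    HttpServletRequest req = mock(HttpServletRequest.class);
    when(rc.getRequest()).thenReturn(req); // no remote user, so the caller UGI stays null

    ViewContext view = mock(ViewContext.class);
    when(view.requestContext()).thenReturn(rc);
    return view;
  }
}

The tests then pass such a stub to the new three-argument constructor, new HsJobBlock(config, jobHistory, view), so the block can be rendered outside a real servlet container.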
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java
index dc4eee238f..1b25b84889 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java
@@ -108,6 +108,10 @@ public String get(String key, String defaultValue) {
     }
 
     public String prefix() { return prefix; }
+
+    public HttpServletRequest getRequest() {
+      return request;
+    }
   }
 
   private RequestContext context;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/View.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/View.java
index c16787d656..666a0bd288 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/View.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/View.java
@@ -96,28 +96,36 @@ public ViewContext context() {
     return vc;
   }
 
-  public Throwable error() { return context().rc.error; }
+  public Throwable error() {
+    return context().requestContext().error;
+  }
 
-  public int status() { return context().rc.status; }
+  public int status() {
+    return context().requestContext().status;
+  }
 
-  public boolean inDevMode() { return context().rc.devMode; }
+  public boolean inDevMode() {
+    return context().requestContext().devMode;
+  }
 
-  public Injector injector() { return context().rc.injector; }
+  public Injector injector() {
+    return context().requestContext().injector;
+  }
 
   public <T> T getInstance(Class<T> cls) {
     return injector().getInstance(cls);
   }
 
   public HttpServletRequest request() {
-    return context().rc.request;
+    return context().requestContext().getRequest();
   }
 
   public HttpServletResponse response() {
-    return context().rc.response;
+    return context().requestContext().response;
  }
 
   public Map<String, String> moreParams() {
-    return context().rc.moreParams();
+    return context().requestContext().moreParams();
   }
 
   /**
@@ -125,7 +133,7 @@ public Map<String, String> moreParams() {
    * @return the cookies map
    */
   public Map<String, Cookie> cookies() {
-    return context().rc.cookies();
+    return context().requestContext().cookies();
   }
 
   public ServletOutputStream outputStream() {
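The Controller and View changes exist so a block can reach the underlying HttpServletRequest through context().requestContext().getRequest() and derive the caller's identity from it. A minimal sketch of that mapping follows, assuming the remote user is whatever the servlet container reports; it is illustrative only and not the patched HtmlBlock code itself.

import javax.servlet.http.HttpServletRequest;

import org.apache.hadoop.security.UserGroupInformation;

// Illustrative only: how a web view can map the HTTP remote user to a UGI.
// On an unsecured UI getRemoteUser() is null, so no filtering is applied.
final class CallerUgi {
  private CallerUgi() {
  }

  static UserGroupInformation from(HttpServletRequest request) {
    String remoteUser = request.getRemoteUser();
    return remoteUser == null
        ? null
        : UserGroupInformation.createRemoteUser(remoteUser);
  }
}

This is also why the guard in HsJobBlock.render() only filters when the UGI is non-null: on an unsecured UI there is no authenticated caller to check against the job's view ACL.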