From 9af30d46c6e82332a8eda20cb3eb5f987e25e7a2 Mon Sep 17 00:00:00 2001
From: Rohith Sharma K S
Date: Sat, 17 Feb 2018 20:30:28 +0530
Subject: [PATCH] YARN-7919. Refactor timelineservice-hbase module into
 submodules. Contributed by Haibo Chen.

---
 .../resources/assemblies/hadoop-yarn-dist.xml | 22 +-
 hadoop-project/pom.xml | 14 +-
 .../pom.xml | 26 +-
 ...TimelineReaderWebServicesHBaseStorage.java | 8 +-
 .../storage/TestHBaseTimelineStorageApps.java | 50 +--
 .../TestHBaseTimelineStorageEntities.java | 89 ++--
 .../TestHBaseTimelineStorageSchema.java | 61 +--
 .../flow/TestHBaseStorageFlowActivity.java | 24 +-
 .../storage/flow/TestHBaseStorageFlowRun.java | 42 +-
 .../TestHBaseStorageFlowRunCompaction.java | 58 +--
 .../pom.xml | 219 +++++++++
 .../reader/filter/TimelineFilterUtils.java | 23 +-
 .../reader/filter/package-info.java | 0
 .../storage/HBaseTimelineReaderImpl.java | 0
 .../storage/HBaseTimelineWriterImpl.java | 132 +++---
 .../storage/TimelineSchemaCreator.java | 39 +-
 .../application/ApplicationTableRW.java} | 47 +-
 .../storage/application/package-info.java | 0
 .../storage/apptoflow/AppToFlowTableRW.java} | 47 +-
 .../storage/apptoflow/package-info.java | 0
 .../storage/common/BaseTableRW.java} | 6 +-
 .../storage/common/ColumnRWHelper.java} | 415 ++++++++++--------
 .../common/HBaseTimelineStorageUtils.java | 121 +++++
 .../common/TimelineHBaseSchemaConstants.java | 0
 .../storage/common/TypedBufferedMutator.java} | 4 +-
 .../storage/common/package-info.java | 0
 .../storage/entity/EntityTableRW.java} | 48 +-
 .../storage/entity/package-info.java | 0
 .../storage/flow/FlowActivityTableRW.java} | 32 +-
 .../storage/flow/FlowRunTableRW.java} | 67 +--
 .../storage/flow/package-info.java | 0
 .../timelineservice/storage/package-info.java | 0
 .../reader/AbstractTimelineStorageReader.java | 17 +-
 .../reader/ApplicationEntityReader.java | 23 +-
 .../storage/reader/EntityTypeReader.java | 4 +-
 .../reader/FlowActivityEntityReader.java | 21 +-
 .../storage/reader/FlowRunEntityReader.java | 24 +-
 .../storage/reader/GenericEntityReader.java | 22 +-
 .../reader/SubApplicationEntityReader.java | 17 +-
 .../storage/reader/TimelineEntityReader.java | 35 +-
 .../reader/TimelineEntityReaderFactory.java | 0
 .../storage/reader/package-info.java | 0
 .../SubApplicationTableRW.java} | 51 +--
 .../storage/subapplication/package-info.java | 0
 .../common/TestHBaseTimelineStorageUtils.java | 0
 .../pom.xml | 132 ++++++
 .../application/ApplicationColumn.java | 31 +-
 .../application/ApplicationColumnFamily.java | 0
 .../application/ApplicationColumnPrefix.java | 110 +----
 .../application/ApplicationRowKey.java | 0
 .../application/ApplicationRowKeyPrefix.java | 0
 .../storage/application/ApplicationTable.java | 60 +++
 .../storage/application/package-info.java | 28 ++
 .../storage/apptoflow/AppToFlowColumn.java | 28 +-
 .../apptoflow/AppToFlowColumnFamily.java | 0
 .../apptoflow/AppToFlowColumnPrefix.java | 105 +++++
 .../storage/apptoflow/AppToFlowRowKey.java | 0
 .../storage/apptoflow/AppToFlowTable.java | 60 +++
 .../storage/apptoflow/package-info.java | 28 ++
 .../storage/common/AppIdKeyConverter.java | 6 +-
 .../storage/common/BaseTable.java} | 9 +-
 .../storage/common/Column.java | 50 +--
 .../storage/common/ColumnFamily.java | 2 +-
 .../storage/common/ColumnHelper.java | 101 +++++
 .../storage/common/ColumnPrefix.java | 71 +++
 .../storage/common/EventColumnName.java | 0
 .../common/EventColumnNameConverter.java | 0
 .../storage/common/GenericConverter.java | 0
 .../common/HBaseTimelineSchemaUtils.java | 156 +++++++
 .../storage/common/KeyConverter.java | 0
 .../storage/common/KeyConverterToString.java | 0
 .../storage/common/LongConverter.java | 2 +-
 .../storage/common/LongKeyConverter.java | 0
 .../storage/common/NumericValueConverter.java | 0
 .../timelineservice/storage/common/Range.java | 0
 .../storage/common/RowKeyPrefix.java | 0
 .../storage/common/Separator.java | 0
 .../storage/common/StringKeyConverter.java | 0
 .../storage/common/TimestampGenerator.java | 0
 .../storage/common/ValueConverter.java | 0
 .../storage/common/package-info.java | 28 ++
 .../storage/entity/EntityColumn.java | 31 +-
 .../storage/entity/EntityColumnFamily.java | 0
 .../storage/entity/EntityColumnPrefix.java | 115 +----
 .../storage/entity/EntityRowKey.java | 0
 .../storage/entity/EntityRowKeyPrefix.java | 0
 .../storage/entity/EntityTable.java | 61 +++
 .../storage/entity/package-info.java | 28 ++
 .../flow/AggregationCompactionDimension.java | 0
 .../storage/flow/AggregationOperation.java | 0
 .../storage/flow/Attribute.java | 0
 .../flow/FlowActivityColumnFamily.java | 0
 .../flow/FlowActivityColumnPrefix.java | 133 ++++++
 .../storage/flow/FlowActivityRowKey.java | 4 +-
 .../flow/FlowActivityRowKeyPrefix.java | 0
 .../storage/flow/FlowActivityTable.java | 45 ++
 .../storage/flow/FlowRunColumn.java | 43 +-
 .../storage/flow/FlowRunColumnFamily.java | 0
 .../storage/flow/FlowRunColumnPrefix.java | 129 ++++++
 .../storage/flow/FlowRunRowKey.java | 0
 .../storage/flow/FlowRunRowKeyPrefix.java | 0
 .../storage/flow/FlowRunTable.java | 77 ++++
 .../storage/flow/package-info.java | 29 ++
 .../timelineservice/storage/package-info.java | 28 ++
 .../subapplication/SubApplicationColumn.java | 33 +-
 .../SubApplicationColumnFamily.java | 0
 .../SubApplicationColumnPrefix.java | 115 +----
 .../subapplication/SubApplicationRowKey.java | 0
 .../SubApplicationRowKeyPrefix.java | 0
 .../subapplication/SubApplicationTable.java | 64 +++
 .../storage/subapplication/package-info.java | 28 ++
 .../TestCustomApplicationIdConversion.java | 2 +-
 .../storage/common/TestKeyConverters.java | 0
 .../storage/common/TestRowKeys.java | 2 +-
 .../storage/common/TestRowKeysAsString.java | 2 +-
 .../storage/common/TestSeparator.java | 0
 .../pom.xml | 161 +++++++
 .../src/assembly/coprocessor.xml | 37 ++
 .../common/HBaseTimelineServerUtils.java | 135 ++++++
 .../storage/common/package-info.java | 28 ++
 .../storage/flow/FlowRunCoprocessor.java | 4 +-
 .../storage/flow/FlowScanner.java | 18 +-
 .../storage/flow/FlowScannerOperation.java | 0
 .../storage/flow/package-info.java | 29 ++
 .../timelineservice/storage/package-info.java | 28 ++
 .../pom.xml | 193 +-------
 .../apptoflow/AppToFlowColumnPrefix.java | 206 ---------
 .../storage/common/ColumnPrefix.java | 145 ------
 .../common/HBaseTimelineStorageUtils.java | 354 ---------------
 .../flow/FlowActivityColumnPrefix.java | 221 ----------
 .../storage/flow/FlowRunColumnPrefix.java | 217 ---------
 131 files changed, 3024 insertions(+), 2536 deletions(-)
 create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml
 rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java (93%)
 rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ =>
hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java (82%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java (92%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTableRW.java} (66%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTableRW.java} (67%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTableRW.java} (96%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnRWHelper.java} (51%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java (100%) rename 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java} (93%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTableRW.java} (65%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTableRW.java} (76%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTableRW.java} (58%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java (91%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java (97%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java (98%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java (92%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => 
hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java (94%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java (97%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java (98%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java (93%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java => hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTableRW.java} (64%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-client}/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/pom.xml rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java (78%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnFamily.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java (55%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => 
hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKeyPrefix.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java (78%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java (95%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java => hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java} (73%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java (50%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => 
hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java (95%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnName.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnNameConverter.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/GenericConverter.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineSchemaUtils.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverter.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverterToString.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java (97%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongKeyConverter.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/RowKeyPrefix.java (100%) rename 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/StringKeyConverter.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java (79%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java (56%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKeyPrefix.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => 
hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java (98%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java (72%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnFamily.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKeyPrefix.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java create mode 100644 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumn.java (78%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnFamily.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java (57%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKey.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKeyPrefix.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestCustomApplicationIdConversion.java (95%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java (100%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java (99%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeysAsString.java (98%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-common}/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java (100%) create mode 100644 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/pom.xml create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/assembly/coprocessor.xml create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineServerUtils.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-server}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java (99%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-server}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java (98%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/{ => hadoop-yarn-server-timelineservice-hbase-server}/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScannerOperation.java (100%) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml index a77dd20977..2c266b617f 100644 --- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml +++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml @@ -213,7 +213,11 @@ - 
hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/target/lib + hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/target/lib + share/hadoop/${hadoop.component}/timelineservice/lib + + + hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/target/lib share/hadoop/${hadoop.component}/timelineservice/lib @@ -221,7 +225,8 @@ org.apache.hadoop:hadoop-yarn-server-timelineservice - org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase + org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-client + org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-common share/hadoop/${hadoop.component}/timelineservice @@ -229,6 +234,19 @@ false + + + org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-server + + + share/hadoop/${hadoop.component}/timelineservice + false + + coprocessor + false + hadoop-yarn-server-timelineservice-hbase-coprocessor-${module.version}.${module.extension} + + org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-tests diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index c27596c2bb..ce51c994a3 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -397,7 +397,19 @@ org.apache.hadoop - hadoop-yarn-server-timelineservice-hbase + hadoop-yarn-server-timelineservice-hbase-client + ${project.version} + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice-hbase-common + ${project.version} + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice-hbase-server ${project.version} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml index f36897b82c..d9f992d056 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml @@ -60,7 +60,31 @@ org.apache.hadoop - hadoop-yarn-server-timelineservice-hbase + hadoop-yarn-server-timelineservice-hbase-client + test + + + org.apache.hadoop + hadoop-common + + + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice-hbase-common + test + + + org.apache.hadoop + hadoop-common + + + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice-hbase-server test diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java index b2029ca1f5..33d8dcd37b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java @@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants; import 
org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext; import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -70,7 +70,7 @@ public class TestTimelineReaderWebServicesHBaseStorage extends AbstractTimelineReaderHBaseTestBase { private static long ts = System.currentTimeMillis(); private static long dayTs = - HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts); + HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(ts); private static String doAsUser = "remoteuser"; @BeforeClass @@ -371,7 +371,7 @@ static void writeApplicationEntities(HBaseTimelineWriterImpl hbi, BuilderUtils.newApplicationId(timestamp, count++); ApplicationEntity appEntity = new ApplicationEntity(); appEntity.setId( - HBaseTimelineStorageUtils.convertApplicationIdToString(appId)); + HBaseTimelineSchemaUtils.convertApplicationIdToString(appId)); appEntity.setCreatedTime(timestamp); TimelineEvent created = new TimelineEvent(); @@ -929,7 +929,7 @@ public void testGetFlows() throws Exception { new String[] {"flow1"}); long firstFlowActivity = - HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(1425016501000L); + HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(1425016501000L); DateFormat fmt = TimelineReaderWebServices.DATE_FORMAT.get(); uri = URI.create("http://localhost:" + getServerPort() + "/ws/v2/" + diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java index 111008a5d4..bc33427de8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java @@ -68,10 +68,11 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn; import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey; -import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName; import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter; @@ -173,7 +174,7 @@ public void testWriteNullApplicationToHBase() throws Exception { scan.setStartRow(Bytes.toBytes(cluster)); scan.setStopRow(Bytes.toBytes(cluster + "1")); Connection conn = ConnectionFactory.createConnection(c1); - ResultScanner resultScanner = new ApplicationTable() + ResultScanner resultScanner = new ApplicationTableRW() .getResultScanner(c1, conn, scan); assertTrue(resultScanner != null); @@ -308,7 +309,7 @@ public void testWriteApplicationToHBase() throws Exception { Get get = new Get(rowKey); get.setMaxVersions(Integer.MAX_VALUE); Connection conn = ConnectionFactory.createConnection(c1); - Result result = new ApplicationTable().getResult(c1, conn, get); + Result result = new ApplicationTableRW().getResult(c1, conn, get); assertTrue(result != null); assertEquals(17, result.size()); @@ -319,24 +320,24 @@ public void testWriteApplicationToHBase() throws Exception { appId)); // check info column family - String id1 = ApplicationColumn.ID.readResult(result).toString(); + String id1 = + ColumnRWHelper.readResult(result, ApplicationColumn.ID).toString(); assertEquals(appId, id1); - Long cTime1 = - (Long) ApplicationColumn.CREATED_TIME.readResult(result); + Long cTime1 = (Long) + ColumnRWHelper.readResult(result, ApplicationColumn.CREATED_TIME); assertEquals(cTime, cTime1); - Map infoColumns = - ApplicationColumnPrefix.INFO.readResults(result, - new StringKeyConverter()); + Map infoColumns = ColumnRWHelper.readResults( + result, ApplicationColumnPrefix.INFO, new StringKeyConverter()); assertEquals(infoMap, infoColumns); // Remember isRelatedTo is of type Map> for (Map.Entry> isRelatedToEntry : isRelatedTo .entrySet()) { - Object isRelatedToValue = - ApplicationColumnPrefix.IS_RELATED_TO.readResult(result, - isRelatedToEntry.getKey()); + Object isRelatedToValue = ColumnRWHelper.readResult( + result, ApplicationColumnPrefix.IS_RELATED_TO, + isRelatedToEntry.getKey()); String compoundValue = isRelatedToValue.toString(); // id7?id9?id6 Set isRelatedToValues = @@ -351,9 +352,9 @@ public void testWriteApplicationToHBase() throws Exception { // RelatesTo for (Map.Entry> relatesToEntry : relatesTo .entrySet()) { - String compoundValue = - ApplicationColumnPrefix.RELATES_TO.readResult(result, - relatesToEntry.getKey()).toString(); + String compoundValue = ColumnRWHelper.readResult(result, + ApplicationColumnPrefix.RELATES_TO, relatesToEntry.getKey()) + .toString(); // id3?id4?id5 Set relatesToValues = new HashSet(Separator.VALUES.splitEncoded(compoundValue)); @@ -366,14 +367,13 @@ public void testWriteApplicationToHBase() throws Exception { KeyConverter stringKeyConverter = new StringKeyConverter(); // Configuration - Map configColumns = - ApplicationColumnPrefix.CONFIG - .readResults(result, stringKeyConverter); + Map configColumns = ColumnRWHelper.readResults( + result, ApplicationColumnPrefix.CONFIG, stringKeyConverter); assertEquals(conf, configColumns); NavigableMap> metricsResult = - ApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result, - stringKeyConverter); + ColumnRWHelper.readResultsWithTimestamps( + result, ApplicationColumnPrefix.METRIC, stringKeyConverter); NavigableMap metricMap = metricsResult.get(m1.getId()); matchMetrics(metricValues, metricMap); @@ -500,7 +500,7 @@ public void testEvents() throws IOException { event.addInfo(expKey, expVal); final TimelineEntity entity = new ApplicationEntity(); - 
entity.setId(HBaseTimelineStorageUtils.convertApplicationIdToString( + entity.setId(HBaseTimelineSchemaUtils.convertApplicationIdToString( ApplicationId.newInstance(0, 1))); entity.addEvent(event); @@ -531,7 +531,7 @@ public void testEvents() throws IOException { Get get = new Get(rowKey); get.setMaxVersions(Integer.MAX_VALUE); Connection conn = ConnectionFactory.createConnection(c1); - Result result = new ApplicationTable().getResult(c1, conn, get); + Result result = new ApplicationTableRW().getResult(c1, conn, get); assertTrue(result != null); @@ -541,8 +541,8 @@ public void testEvents() throws IOException { appName)); Map eventsResult = - ApplicationColumnPrefix.EVENT.readResults(result, - new EventColumnNameConverter()); + ColumnRWHelper.readResults(result, + ApplicationColumnPrefix.EVENT, new EventColumnNameConverter()); // there should be only one event assertEquals(1, eventsResult.size()); for (Map.Entry e : eventsResult.entrySet()) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java index 5e089992ba..90a69595e5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java @@ -62,9 +62,10 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName; import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter; @@ -73,12 +74,12 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumn; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumnPrefix; import 
org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -208,7 +209,7 @@ public void testWriteEntityToHBase() throws Exception { String flow = "some_flow_name"; String flowVersion = "AB7822C10F1111"; long runid = 1002345678919L; - String appName = HBaseTimelineStorageUtils.convertApplicationIdToString( + String appName = HBaseTimelineSchemaUtils.convertApplicationIdToString( ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1) ); hbi.write(new TimelineCollectorContext(cluster, user, flow, flowVersion, @@ -224,7 +225,7 @@ public void testWriteEntityToHBase() throws Exception { s.setStartRow(startRow); s.setMaxVersions(Integer.MAX_VALUE); Connection conn = ConnectionFactory.createConnection(c1); - ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s); + ResultScanner scanner = new EntityTableRW().getResultScanner(c1, conn, s); int rowCount = 0; int colCount = 0; @@ -238,26 +239,27 @@ public void testWriteEntityToHBase() throws Exception { entity)); // check info column family - String id1 = EntityColumn.ID.readResult(result).toString(); + String id1 = + ColumnRWHelper.readResult(result, EntityColumn.ID).toString(); assertEquals(id, id1); - String type1 = EntityColumn.TYPE.readResult(result).toString(); + String type1 = + ColumnRWHelper.readResult(result, EntityColumn.TYPE).toString(); assertEquals(type, type1); - Long cTime1 = (Long) EntityColumn.CREATED_TIME.readResult(result); + Long cTime1 = (Long) + ColumnRWHelper.readResult(result, EntityColumn.CREATED_TIME); assertEquals(cTime1, cTime); - Map infoColumns = - EntityColumnPrefix.INFO.readResults(result, - new StringKeyConverter()); + Map infoColumns = ColumnRWHelper.readResults( + result, EntityColumnPrefix.INFO, new StringKeyConverter()); assertEquals(infoMap, infoColumns); // Remember isRelatedTo is of type Map> for (Map.Entry> isRelatedToEntry : isRelatedTo .entrySet()) { - Object isRelatedToValue = - EntityColumnPrefix.IS_RELATED_TO.readResult(result, - isRelatedToEntry.getKey()); + Object isRelatedToValue = ColumnRWHelper.readResult(result, + EntityColumnPrefix.IS_RELATED_TO, isRelatedToEntry.getKey()); String compoundValue = isRelatedToValue.toString(); // id7?id9?id6 Set isRelatedToValues = @@ -273,8 +275,9 @@ public void testWriteEntityToHBase() throws Exception { // RelatesTo for (Map.Entry> relatesToEntry : relatesTo .entrySet()) { - String compoundValue = EntityColumnPrefix.RELATES_TO - .readResult(result, relatesToEntry.getKey()).toString(); + String compoundValue = ColumnRWHelper.readResult(result, + EntityColumnPrefix.RELATES_TO, relatesToEntry.getKey()) + .toString(); // id3?id4?id5 Set relatesToValues = new HashSet( @@ -287,13 +290,13 @@ public void testWriteEntityToHBase() throws Exception { } // Configuration - Map configColumns = - EntityColumnPrefix.CONFIG.readResults(result, stringKeyConverter); + Map configColumns = ColumnRWHelper.readResults( + result, EntityColumnPrefix.CONFIG, stringKeyConverter); assertEquals(conf, configColumns); NavigableMap> metricsResult = - EntityColumnPrefix.METRIC.readResultsWithTimestamps(result, - stringKeyConverter); + 
ColumnRWHelper.readResultsWithTimestamps( + result, EntityColumnPrefix.METRIC, stringKeyConverter); NavigableMap metricMap = metricsResult.get(m1.getId()); matchMetrics(metricValues, metricMap); @@ -386,14 +389,14 @@ private void verifySubApplicationTableEntities(String cluster, String user, Set metrics, Long cTime, TimelineMetric m1) throws IOException { Scan s = new Scan(); - // read from SubApplicationTable + // read from SubApplicationTableRW byte[] startRow = new SubApplicationRowKeyPrefix(cluster, subAppUser, null, null, null, null).getRowKeyPrefix(); s.setStartRow(startRow); s.setMaxVersions(Integer.MAX_VALUE); Connection conn = ConnectionFactory.createConnection(c1); ResultScanner scanner = - new SubApplicationTable().getResultScanner(c1, conn, s); + new SubApplicationTableRW().getResultScanner(c1, conn, s); int rowCount = 0; int colCount = 0; @@ -407,25 +410,28 @@ private void verifySubApplicationTableEntities(String cluster, String user, user, entity)); // check info column family - String id1 = SubApplicationColumn.ID.readResult(result).toString(); + String id1 = ColumnRWHelper.readResult(result, SubApplicationColumn.ID) + .toString(); assertEquals(id, id1); - String type1 = SubApplicationColumn.TYPE.readResult(result).toString(); + String type1 = ColumnRWHelper.readResult(result, + SubApplicationColumn.TYPE).toString(); assertEquals(type, type1); - Long cTime1 = - (Long) SubApplicationColumn.CREATED_TIME.readResult(result); + Long cTime1 = (Long) ColumnRWHelper.readResult(result, + SubApplicationColumn.CREATED_TIME); assertEquals(cTime1, cTime); - Map infoColumns = SubApplicationColumnPrefix.INFO - .readResults(result, new StringKeyConverter()); + Map infoColumns = ColumnRWHelper.readResults( + result, SubApplicationColumnPrefix.INFO, new StringKeyConverter()); assertEquals(infoMap, infoColumns); // Remember isRelatedTo is of type Map> for (Map.Entry> isRelatedToEntry : isRelatedTo .entrySet()) { - Object isRelatedToValue = SubApplicationColumnPrefix.IS_RELATED_TO - .readResult(result, isRelatedToEntry.getKey()); + Object isRelatedToValue = ColumnRWHelper.readResult( + result, SubApplicationColumnPrefix.IS_RELATED_TO, + isRelatedToEntry.getKey()); String compoundValue = isRelatedToValue.toString(); // id7?id9?id6 Set isRelatedToValues = @@ -440,8 +446,9 @@ private void verifySubApplicationTableEntities(String cluster, String user, // RelatesTo for (Map.Entry> relatesToEntry : relatesTo .entrySet()) { - String compoundValue = SubApplicationColumnPrefix.RELATES_TO - .readResult(result, relatesToEntry.getKey()).toString(); + String compoundValue = ColumnRWHelper.readResult(result, + SubApplicationColumnPrefix.RELATES_TO, relatesToEntry.getKey()) + .toString(); // id3?id4?id5 Set relatesToValues = new HashSet(Separator.VALUES.splitEncoded(compoundValue)); @@ -453,13 +460,13 @@ private void verifySubApplicationTableEntities(String cluster, String user, } // Configuration - Map configColumns = SubApplicationColumnPrefix.CONFIG - .readResults(result, stringKeyConverter); + Map configColumns = ColumnRWHelper.readResults( + result, SubApplicationColumnPrefix.CONFIG, stringKeyConverter); assertEquals(conf, configColumns); NavigableMap> metricsResult = - SubApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result, - stringKeyConverter); + ColumnRWHelper.readResultsWithTimestamps( + result, SubApplicationColumnPrefix.METRIC, stringKeyConverter); NavigableMap metricMap = metricsResult.get(m1.getId()); matchMetrics(metricValues, metricMap); @@ -511,7 +518,7 @@ public void 
testEventsWithEmptyInfo() throws IOException { String flow = "other_flow_name"; String flowVersion = "1111F01C2287BA"; long runid = 1009876543218L; - String appName = HBaseTimelineStorageUtils.convertApplicationIdToString( + String appName = HBaseTimelineSchemaUtils.convertApplicationIdToString( ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1)); byte[] startRow = new EntityRowKeyPrefix(cluster, user, flow, runid, appName) @@ -525,7 +532,7 @@ public void testEventsWithEmptyInfo() throws IOException { s.setStartRow(startRow); s.addFamily(EntityColumnFamily.INFO.getBytes()); Connection conn = ConnectionFactory.createConnection(c1); - ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s); + ResultScanner scanner = new EntityTableRW().getResultScanner(c1, conn, s); int rowCount = 0; for (Result result : scanner) { @@ -538,8 +545,8 @@ public void testEventsWithEmptyInfo() throws IOException { entity)); Map eventsResult = - EntityColumnPrefix.EVENT.readResults(result, - new EventColumnNameConverter()); + ColumnRWHelper.readResults(result, + EntityColumnPrefix.EVENT, new EventColumnNameConverter()); // there should be only one event assertEquals(1, eventsResult.size()); for (Map.Entry e : eventsResult.entrySet()) { @@ -604,7 +611,7 @@ public void testEventsEscapeTs() throws IOException { final TimelineEntity entity = new ApplicationEntity(); entity.setId( - HBaseTimelineStorageUtils.convertApplicationIdToString( + HBaseTimelineSchemaUtils.convertApplicationIdToString( ApplicationId.newInstance(0, 1))); entity.addEvent(event); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java index 0dcd171ec5..f838178449 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java @@ -21,6 +21,9 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW; import org.junit.BeforeClass; import org.junit.Test; @@ -35,10 +38,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable; - /** * Unit tests for checking different schema prefixes. 
*/ @@ -61,22 +60,24 @@ public void createWithDefaultPrefix() throws IOException { conn = ConnectionFactory.createConnection(hbaseConf); Admin admin = conn.getAdmin(); - TableName entityTableName = BaseTable.getTableName(hbaseConf, - EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME); + TableName entityTableName = BaseTableRW.getTableName(hbaseConf, + EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME); assertTrue(admin.tableExists(entityTableName)); assertTrue(entityTableName.getNameAsString().startsWith( YarnConfiguration.DEFAULT_TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX)); - Table entityTable = conn.getTable(BaseTable.getTableName(hbaseConf, - EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME)); + Table entityTable = conn.getTable(BaseTableRW.getTableName(hbaseConf, + EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME)); assertNotNull(entityTable); - TableName flowRunTableName = BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME); + TableName flowRunTableName = BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME); assertTrue(admin.tableExists(flowRunTableName)); assertTrue(flowRunTableName.getNameAsString().startsWith( YarnConfiguration.DEFAULT_TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX)); - Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table flowRunTable = conn.getTable( + BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); assertNotNull(flowRunTable); } @@ -91,20 +92,22 @@ public void createWithSetPrefix() throws IOException { conn = ConnectionFactory.createConnection(hbaseConf); Admin admin = conn.getAdmin(); - TableName entityTableName = BaseTable.getTableName(hbaseConf, - EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME); + TableName entityTableName = BaseTableRW.getTableName(hbaseConf, + EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME); assertTrue(admin.tableExists(entityTableName)); assertTrue(entityTableName.getNameAsString().startsWith(prefix)); - Table entityTable = conn.getTable(BaseTable.getTableName(hbaseConf, - EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME)); + Table entityTable = conn.getTable(BaseTableRW.getTableName(hbaseConf, + EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME)); assertNotNull(entityTable); - TableName flowRunTableName = BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME); + TableName flowRunTableName = BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME); assertTrue(admin.tableExists(flowRunTableName)); assertTrue(flowRunTableName.getNameAsString().startsWith(prefix)); - Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table flowRunTable = conn.getTable( + BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); assertNotNull(flowRunTable); // create another set with a diff prefix @@ -114,20 +117,22 @@ public void createWithSetPrefix() throws IOException { hbaseConf.set(YarnConfiguration.TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX_NAME, prefix); DataGeneratorForTest.createSchema(hbaseConf); - entityTableName = 
BaseTable.getTableName(hbaseConf, - EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME); + entityTableName = BaseTableRW.getTableName(hbaseConf, + EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME); assertTrue(admin.tableExists(entityTableName)); assertTrue(entityTableName.getNameAsString().startsWith(prefix)); - entityTable = conn.getTable(BaseTable.getTableName(hbaseConf, - EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME)); + entityTable = conn.getTable(BaseTableRW.getTableName(hbaseConf, + EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME)); assertNotNull(entityTable); - flowRunTableName = BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME); + flowRunTableName = BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME); assertTrue(admin.tableExists(flowRunTableName)); assertTrue(flowRunTableName.getNameAsString().startsWith(prefix)); - flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + flowRunTable = conn.getTable( + BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); assertNotNull(flowRunTable); hbaseConf .unset(YarnConfiguration.TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX_NAME); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java index 4bf221e2ad..645b7d5dcc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java @@ -52,9 +52,9 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.DataGeneratorForTest; import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -161,8 +161,8 @@ public void testWriteFlowRunMinMax() throws Exception { Connection conn = ConnectionFactory.createConnection(c1); // check in flow activity table Table table1 = conn.getTable( - BaseTable.getTableName(c1, FlowActivityTable.TABLE_NAME_CONF_NAME, - FlowActivityTable.DEFAULT_TABLE_NAME)); + BaseTableRW.getTableName(c1, FlowActivityTableRW.TABLE_NAME_CONF_NAME, + FlowActivityTableRW.DEFAULT_TABLE_NAME)); byte[] startRow = new 
FlowActivityRowKey(cluster, minStartTs, user, flow).getRowKey(); Get g = new Get(startRow); @@ -178,7 +178,7 @@ public void testWriteFlowRunMinMax() throws Exception { assertEquals(cluster, flowActivityRowKey.getClusterId()); assertEquals(user, flowActivityRowKey.getUserId()); assertEquals(flow, flowActivityRowKey.getFlowName()); - Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(minStartTs); + Long dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(minStartTs); assertEquals(dayTs, flowActivityRowKey.getDayTimestamp()); assertEquals(1, values.size()); checkFlowActivityRunId(runid, flowVersion, values); @@ -292,8 +292,8 @@ private void checkFlowActivityTable(String cluster, String user, String flow, s.setStopRow(stopRow); Connection conn = ConnectionFactory.createConnection(c1); Table table1 = conn.getTable( - BaseTable.getTableName(c1, FlowActivityTable.TABLE_NAME_CONF_NAME, - FlowActivityTable.DEFAULT_TABLE_NAME)); + BaseTableRW.getTableName(c1, FlowActivityTableRW.TABLE_NAME_CONF_NAME, + FlowActivityTableRW.DEFAULT_TABLE_NAME)); ResultScanner scanner = table1.getScanner(s); int rowCount = 0; for (Result result : scanner) { @@ -309,7 +309,7 @@ private void checkFlowActivityTable(String cluster, String user, String flow, assertEquals(cluster, flowActivityRowKey.getClusterId()); assertEquals(user, flowActivityRowKey.getUserId()); assertEquals(flow, flowActivityRowKey.getFlowName()); - Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp( + Long dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp( appCreatedTime); assertEquals(dayTs, flowActivityRowKey.getDayTimestamp()); assertEquals(1, values.size()); @@ -401,7 +401,7 @@ public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException { assertEquals(user, flowActivity.getUser()); assertEquals(flow, flowActivity.getFlowName()); long dayTs = - HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(appCreatedTime); + HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(appCreatedTime); assertEquals(dayTs, flowActivity.getDate().getTime()); Set flowRuns = flowActivity.getFlowRuns(); assertEquals(3, flowRuns.size()); @@ -442,8 +442,8 @@ private void checkFlowActivityTableSeveralRuns(String cluster, String user, s.setStopRow(stopRow); Connection conn = ConnectionFactory.createConnection(c1); Table table1 = conn.getTable( - BaseTable.getTableName(c1, FlowActivityTable.TABLE_NAME_CONF_NAME, - FlowActivityTable.DEFAULT_TABLE_NAME)); + BaseTableRW.getTableName(c1, FlowActivityTableRW.TABLE_NAME_CONF_NAME, + FlowActivityTableRW.DEFAULT_TABLE_NAME)); ResultScanner scanner = table1.getScanner(s); int rowCount = 0; for (Result result : scanner) { @@ -456,7 +456,7 @@ private void checkFlowActivityTableSeveralRuns(String cluster, String user, assertEquals(cluster, flowActivityRowKey.getClusterId()); assertEquals(user, flowActivityRowKey.getUserId()); assertEquals(flow, flowActivityRowKey.getFlowName()); - Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp( + Long dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp( appCreatedTime); assertEquals(dayTs, flowActivityRowKey.getDayTimestamp()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java index 
1ad02e1886..622b0eba22 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java @@ -62,9 +62,9 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; -import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -91,8 +91,8 @@ public static void setupBeforeClass() throws Exception { @Test public void checkCoProcessorOff() throws IOException, InterruptedException { Configuration hbaseConf = util.getConfiguration(); - TableName table = BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME); + TableName table = BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME); Connection conn = null; conn = ConnectionFactory.createConnection(hbaseConf); Admin admin = conn.getAdmin(); @@ -106,9 +106,9 @@ public void checkCoProcessorOff() throws IOException, InterruptedException { checkCoprocessorExists(table, true); } - table = BaseTable.getTableName(hbaseConf, - FlowActivityTable.TABLE_NAME_CONF_NAME, - FlowActivityTable.DEFAULT_TABLE_NAME); + table = BaseTableRW.getTableName(hbaseConf, + FlowActivityTableRW.TABLE_NAME_CONF_NAME, + FlowActivityTableRW.DEFAULT_TABLE_NAME); if (admin.tableExists(table)) { // check the regions. // check in flow activity table @@ -116,8 +116,8 @@ public void checkCoProcessorOff() throws IOException, InterruptedException { checkCoprocessorExists(table, false); } - table = BaseTable.getTableName(hbaseConf, EntityTable.TABLE_NAME_CONF_NAME, - EntityTable.DEFAULT_TABLE_NAME); + table = BaseTableRW.getTableName(hbaseConf, + EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME); if (admin.tableExists(table)) { // check the regions. 
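Every table lookup in the hunks above and below follows the same pattern after the split: the name of a (possibly prefix-renamed) timeline table is resolved through the static BaseTableRW.getTableName(conf, confName, defaultName) helper, and the resulting TableName is handed to the ordinary HBase Admin and Connection APIs. A minimal self-contained sketch of that pattern is shown here; the holder class TimelineTableLookup is an illustrative name and not part of this patch, while the *TableRW constants are the ones the patch introduces.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW;

/** Illustrative sketch only: table-name resolution as used by the tests above. */
public final class TimelineTableLookup {

  private TimelineTableLookup() {
  }

  public static void checkTimelineTables(Configuration hbaseConf) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection(hbaseConf);
         Admin admin = conn.getAdmin()) {
      // Resolve the entity table name, honoring any configured schema prefix.
      TableName entityTableName = BaseTableRW.getTableName(hbaseConf,
          EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME);
      if (admin.tableExists(entityTableName)) {
        try (Table entityTable = conn.getTable(entityTableName)) {
          // ... issue Gets/Scans against entityTable as the tests above do ...
        }
      }
      // The same helper is used for every *TableRW class, e.g. the flow run table.
      TableName flowRunTableName = BaseTableRW.getTableName(hbaseConf,
          FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME);
      if (admin.tableExists(flowRunTableName)) {
        try (Table flowRunTable = conn.getTable(flowRunTableName)) {
          // ... verify coprocessor wiring or scan flow run rows ...
        }
      }
    }
  }
}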
// check in entity run table @@ -224,8 +224,10 @@ public void testWriteFlowRunMinMax() throws Exception { Connection conn = ConnectionFactory.createConnection(c1); // check in flow run table - Table table1 = conn.getTable(BaseTable.getTableName(c1, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table table1 = conn.getTable( + BaseTableRW.getTableName(c1, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); // scan the table and see that we get back the right min and max // timestamps byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey(); @@ -380,8 +382,10 @@ void checkFlowRunTableBatchLimit(String cluster, String user, String flow, .getRowKey(); s.setStopRow(stopRow); Connection conn = ConnectionFactory.createConnection(c1); - Table table1 = conn.getTable(BaseTable.getTableName(c1, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table table1 = conn.getTable( + BaseTableRW.getTableName(c1, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); ResultScanner scanner = table1.getScanner(s); int loopCount = 0; @@ -525,8 +529,10 @@ private void checkFlowRunTable(String cluster, String user, String flow, new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey(); s.setStopRow(stopRow); Connection conn = ConnectionFactory.createConnection(c1); - Table table1 = conn.getTable(BaseTable.getTableName(c1, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table table1 = conn.getTable( + BaseTableRW.getTableName(c1, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); ResultScanner scanner = table1.getScanner(s); int rowCount = 0; @@ -810,8 +816,10 @@ private void checkMinMaxFlush(Configuration c1, long minTS, long startTs, boolean checkMax) throws IOException { Connection conn = ConnectionFactory.createConnection(c1); // check in flow run table - Table table1 = conn.getTable(BaseTable.getTableName(c1, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table table1 = conn.getTable( + BaseTableRW.getTableName(c1, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); // scan the table and see that we get back the right min and max // timestamps byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java index 0ef8260dee..31be285e41 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java @@ -54,9 +54,9 @@ import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext; import org.apache.hadoop.yarn.server.timelineservice.storage.DataGeneratorForTest; import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl; -import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineServerUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator; import org.junit.AfterClass; @@ -107,8 +107,10 @@ public void testWriteNonNumericData() throws Exception { Configuration hbaseConf = util.getConfiguration(); Connection conn = null; conn = ConnectionFactory.createConnection(hbaseConf); - Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table flowRunTable = conn.getTable( + BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); flowRunTable.put(p); Get g = new Get(rowKeyBytes); @@ -156,8 +158,10 @@ public void testWriteScanBatchLimit() throws Exception { Configuration hbaseConf = util.getConfiguration(); Connection conn = null; conn = ConnectionFactory.createConnection(hbaseConf); - Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table flowRunTable = conn.getTable( + BaseTableRW.getTableName(hbaseConf, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); flowRunTable.put(p); String rowKey2 = "nonNumericRowKey2"; @@ -324,10 +328,12 @@ public void testWriteFlowRunCompaction() throws Exception { // check in flow run table HRegionServer server = util.getRSForFirstRegionInTable( - BaseTable.getTableName(c1, FlowRunTable.TABLE_NAME_CONF_NAME, - FlowRunTable.DEFAULT_TABLE_NAME)); - List regions = server.getOnlineRegions(BaseTable.getTableName(c1, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + BaseTableRW.getTableName(c1, FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); + List regions = server.getOnlineRegions( + BaseTableRW.getTableName(c1, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); assertTrue("Didn't find any regions for primary table!", regions.size() > 0); // flush and compact all the regions of the primary table @@ -352,8 +358,10 @@ private void checkFlowRunTable(String cluster, String user, String flow, new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey(); s.setStopRow(stopRow); Connection conn = ConnectionFactory.createConnection(c1); - Table table1 = conn.getTable(BaseTable.getTableName(c1, - FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME)); + Table table1 = conn.getTable( + BaseTableRW.getTableName(c1, + FlowRunTableRW.TABLE_NAME_CONF_NAME, + FlowRunTableRW.DEFAULT_TABLE_NAME)); ResultScanner scanner = table1.getScanner(s); int rowCount = 0; @@ -420,7 +428,7 @@ public void checkProcessSummationMoreCellsSumFinal2() tags.add(t); byte[] tagByteArray = Tag.fromList(tags); // create a cell with a VERY old timestamp and attribute SUM_FINAL - Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cell1Ts, Bytes.toBytes(cellValue1), tagByteArray); currentColumnCells.add(c1); @@ -430,7 
+438,7 @@ public void checkProcessSummationMoreCellsSumFinal2() tags.add(t); tagByteArray = Tag.fromList(tags); // create a cell with a recent timestamp and attribute SUM_FINAL - Cell c2 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c2 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cell2Ts, Bytes.toBytes(cellValue2), tagByteArray); currentColumnCells.add(c2); @@ -440,7 +448,7 @@ public void checkProcessSummationMoreCellsSumFinal2() tags.add(t); tagByteArray = Tag.fromList(tags); // create a cell with a VERY old timestamp but has attribute SUM - Cell c3 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c3 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cell3Ts, Bytes.toBytes(cellValue3), tagByteArray); currentColumnCells.add(c3); @@ -450,7 +458,7 @@ public void checkProcessSummationMoreCellsSumFinal2() tags.add(t); tagByteArray = Tag.fromList(tags); // create a cell with a VERY old timestamp but has attribute SUM - Cell c4 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c4 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cell4Ts, Bytes.toBytes(cellValue4), tagByteArray); currentColumnCells.add(c4); @@ -520,7 +528,7 @@ public void checkProcessSummationMoreCellsSumFinalMany() throws IOException { tags.add(t); byte[] tagByteArray = Tag.fromList(tags); // create a cell with a VERY old timestamp and attribute SUM_FINAL - c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier, + c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cellTsFinal, Bytes.toBytes(cellValueFinal), tagByteArray); currentColumnCells.add(c1); cellTsFinal++; @@ -534,7 +542,7 @@ public void checkProcessSummationMoreCellsSumFinalMany() throws IOException { tags.add(t); byte[] tagByteArray = Tag.fromList(tags); // create a cell with attribute SUM - c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier, + c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cellTsNotFinal, Bytes.toBytes(cellValueNotFinal), tagByteArray); currentColumnCells.add(c1); cellTsNotFinal++; @@ -611,7 +619,7 @@ public void checkProcessSummationMoreCellsSumFinalVariedTags() tags.add(t); byte[] tagByteArray = Tag.fromList(tags); // create a cell with a VERY old timestamp and attribute SUM_FINAL - c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier, + c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cellTsFinal, Bytes.toBytes(cellValueFinal), tagByteArray); currentColumnCells.add(c1); cellTsFinal++; @@ -625,7 +633,7 @@ public void checkProcessSummationMoreCellsSumFinalVariedTags() tags.add(t); byte[] tagByteArray = Tag.fromList(tags); // create a cell with a VERY old timestamp and attribute SUM_FINAL - c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier, + c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cellTsFinalNotExpire, Bytes.toBytes(cellValueFinal), tagByteArray); currentColumnCells.add(c1); cellTsFinalNotExpire++; @@ -639,7 +647,7 @@ public void checkProcessSummationMoreCellsSumFinalVariedTags() tags.add(t); byte[] tagByteArray = Tag.fromList(tags); // create a cell with attribute SUM - c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier, + c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, cellTsNotFinal, Bytes.toBytes(cellValueNotFinal), tagByteArray); currentColumnCells.add(c1); cellTsNotFinal++; @@ -696,7 
+704,7 @@ public void testProcessSummationMoreCellsSumFinal() throws IOException { SortedSet currentColumnCells = new TreeSet(KeyValue.COMPARATOR); // create a cell with a VERY old timestamp and attribute SUM_FINAL - Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, 120L, Bytes.toBytes(cellValue1), tagByteArray); currentColumnCells.add(c1); @@ -707,7 +715,7 @@ public void testProcessSummationMoreCellsSumFinal() throws IOException { tagByteArray = Tag.fromList(tags); // create a cell with a VERY old timestamp but has attribute SUM - Cell c2 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c2 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, 130L, Bytes.toBytes(cellValue2), tagByteArray); currentColumnCells.add(c2); List cells = fs.processSummationMajorCompaction(currentColumnCells, @@ -754,7 +762,7 @@ public void testProcessSummationOneCellSumFinal() throws IOException { SortedSet currentColumnCells = new TreeSet(KeyValue.COMPARATOR); // create a cell with a VERY old timestamp - Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, 120L, Bytes.toBytes(1110L), tagByteArray); currentColumnCells.add(c1); @@ -792,7 +800,7 @@ public void testProcessSummationOneCell() throws IOException { SortedSet currentColumnCells = new TreeSet(KeyValue.COMPARATOR); - Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, + Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier, currentTimestamp, Bytes.toBytes(1110L), tagByteArray); currentColumnCells.add(c1); List cells = fs.processSummationMajorCompaction(currentColumnCells, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml new file mode 100644 index 0000000000..a1db497111 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml @@ -0,0 +1,219 @@ + + + + + hadoop-yarn-server-timelineservice-hbase + org.apache.hadoop + 3.2.0-SNAPSHOT + + 4.0.0 + hadoop-yarn-server-timelineservice-hbase-client + Apache Hadoop YARN TimelineService HBase Client + + + + ${project.parent.parent.parent.basedir} + + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice-hbase-common + + + + commons-logging + commons-logging + + + + commons-lang + commons-lang + + + + commons-cli + commons-cli + + + + com.google.guava + guava + + + + org.apache.hadoop + hadoop-annotations + provided + + + + org.apache.hadoop + hadoop-common + provided + + + + + org.apache.hadoop + hadoop-common + test-jar + test + + + + org.apache.hadoop + hadoop-yarn-api + provided + + + + org.apache.hadoop + hadoop-yarn-common + provided + + + + org.apache.hadoop + hadoop-yarn-server-applicationhistoryservice + provided + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice + + + + org.apache.hbase + hbase-common + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.mortbay.jetty + jetty-util + + + + + + org.apache.hbase + hbase-client + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + + + + org.apache.hbase + hbase-server + provided + + + 
org.apache.hadoop + hadoop-hdfs + + + org.apache.hadoop + hadoop-hdfs-client + + + org.apache.hadoop + hadoop-client + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + + + org.mortbay.jetty + jetty-sslengine + + + + + + junit + junit + test + + + + + + + maven-jar-plugin + + + + test-jar + + test-compile + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + junit + junit + 4.11 + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + package + + copy-dependencies + + + runtime + org.slf4j,org.apache.hadoop,com.github.stephenc.findbugs + ${project.build.directory}/lib + + + + + + + diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java similarity index 93% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java index 8b46d32413..2b98eecfc2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.FilterList.Operator; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; @@ -98,7 +99,7 @@ private static CompareOp getHBaseCompareOp( * @param filter * @return a {@link QualifierFilter} object */ - private static Filter createHBaseColQualPrefixFilter( + private static > Filter createHBaseColQualPrefixFilter( ColumnPrefix colPrefix, TimelinePrefixFilter filter) { return new QualifierFilter(getHBaseCompareOp(filter.getCompareOp()), new BinaryPrefixComparator( @@ -114,8 +115,8 @@ private static Filter createHBaseColQualPrefixFilter( * @param columnPrefix column prefix. * @return a column qualifier filter. */ - public static Filter createHBaseQualifierFilter(CompareOp compareOp, - ColumnPrefix columnPrefix) { + public static > Filter createHBaseQualifierFilter( + CompareOp compareOp, ColumnPrefix columnPrefix) { return new QualifierFilter(compareOp, new BinaryPrefixComparator( columnPrefix.getColumnPrefixBytes(""))); @@ -133,7 +134,8 @@ public static Filter createHBaseQualifierFilter(CompareOp compareOp, * @return a filter list. 
* @throws IOException if any problem occurs while creating the filters. */ - public static Filter createFilterForConfsOrMetricsToRetrieve( + public static > Filter + createFilterForConfsOrMetricsToRetrieve( TimelineFilterList confsOrMetricToRetrieve, ColumnFamily columnFamily, ColumnPrefix columnPrefix) throws IOException { Filter familyFilter = new FamilyFilter(CompareOp.EQUAL, @@ -164,8 +166,9 @@ public static Filter createFilterForConfsOrMetricsToRetrieve( * @return 2 single column value filters wrapped in a filter list. * @throws IOException if any problem is encountered while encoding value. */ - public static FilterList createSingleColValueFiltersByRange( - Column column, Object startValue, Object endValue) throws IOException { + public static > FilterList + createSingleColValueFiltersByRange(Column column, + Object startValue, Object endValue) throws IOException { FilterList list = new FilterList(); Filter singleColValFilterStart = createHBaseSingleColValueFilter( column.getColumnFamilyBytes(), column.getColumnQualifierBytes(), @@ -190,8 +193,9 @@ public static FilterList createSingleColValueFiltersByRange( * @return a SingleColumnValue Filter * @throws IOException if any exception. */ - public static Filter createHBaseSingleColValueFilter(Column column, - Object value, CompareOp op) throws IOException { + public static > Filter + createHBaseSingleColValueFilter(Column column, + Object value, CompareOp op) throws IOException { Filter singleColValFilter = createHBaseSingleColValueFilter( column.getColumnFamilyBytes(), column.getColumnQualifierBytes(), column.getValueConverter().encodeValue(value), op, true); @@ -263,7 +267,8 @@ public static Set fetchColumnsFromFilterList( * @return A {@link FilterList} object. * @throws IOException if any problem occurs while creating the filter list. 
*/ - public static FilterList createHBaseFilterList(ColumnPrefix colPrefix, + public static > FilterList createHBaseFilterList( + ColumnPrefix colPrefix, TimelineFilterList filterList) throws IOException { FilterList list = new FilterList(getHBaseOperator(filterList.getOperator())); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/package-info.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/package-info.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/package-info.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java similarity index 82% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java index f9381853f2..027505b560 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java @@ -40,10 +40,14 @@ import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName; import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; @@ -55,20 +59,24 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumn; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumn; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -121,14 +129,15 @@ protected void serviceInit(Configuration conf) throws Exception { Configuration hbaseConf = HBaseTimelineStorageUtils.getTimelineServiceHBaseConf(conf); conn = ConnectionFactory.createConnection(hbaseConf); - entityTable = new EntityTable().getTableMutator(hbaseConf, conn); - appToFlowTable = new AppToFlowTable().getTableMutator(hbaseConf, conn); - 
applicationTable = new ApplicationTable().getTableMutator(hbaseConf, conn); - flowRunTable = new FlowRunTable().getTableMutator(hbaseConf, conn); + entityTable = new EntityTableRW().getTableMutator(hbaseConf, conn); + appToFlowTable = new AppToFlowTableRW().getTableMutator(hbaseConf, conn); + applicationTable = + new ApplicationTableRW().getTableMutator(hbaseConf, conn); + flowRunTable = new FlowRunTableRW().getTableMutator(hbaseConf, conn); flowActivityTable = - new FlowActivityTable().getTableMutator(hbaseConf, conn); + new FlowActivityTableRW().getTableMutator(hbaseConf, conn); subApplicationTable = - new SubApplicationTable().getTableMutator(hbaseConf, conn); + new SubApplicationTableRW().getTableMutator(hbaseConf, conn); UserGroupInformation ugi = UserGroupInformation.isSecurityEnabled() ? UserGroupInformation.getLoginUser() : @@ -232,12 +241,12 @@ private void onApplicationCreated(FlowRunRowKey flowRunRowKey, // store in App to flow table AppToFlowRowKey appToFlowRowKey = new AppToFlowRowKey(appId); byte[] rowKey = appToFlowRowKey.getRowKey(); - AppToFlowColumnPrefix.FLOW_NAME.store(rowKey, appToFlowTable, clusterId, - null, flowName); - AppToFlowColumnPrefix.FLOW_RUN_ID.store(rowKey, appToFlowTable, clusterId, - null, flowRunId); - AppToFlowColumnPrefix.USER_ID.store(rowKey, appToFlowTable, clusterId, null, - userId); + ColumnRWHelper.store(rowKey, appToFlowTable, + AppToFlowColumnPrefix.FLOW_NAME, clusterId, null, flowName); + ColumnRWHelper.store(rowKey, appToFlowTable, + AppToFlowColumnPrefix.FLOW_RUN_ID, clusterId, null, flowRunId); + ColumnRWHelper.store(rowKey, appToFlowTable, AppToFlowColumnPrefix.USER_ID, + clusterId, null, userId); // store in flow run table storeAppCreatedInFlowRunTable(flowRunRowKey, appId, te); @@ -248,8 +257,8 @@ private void onApplicationCreated(FlowRunRowKey flowRunRowKey, appCreatedTimeStamp, flowRunRowKey.getUserId(), flowName) .getRowKey(); byte[] qualifier = longKeyConverter.encode(flowRunRowKey.getFlowRunId()); - FlowActivityColumnPrefix.RUN_ID.store(flowActivityRowKeyBytes, - flowActivityTable, qualifier, null, flowVersion, + ColumnRWHelper.store(flowActivityRowKeyBytes, flowActivityTable, + FlowActivityColumnPrefix.RUN_ID, qualifier, null, flowVersion, AggregationCompactionDimension.APPLICATION_ID.getAttribute(appId)); } @@ -259,8 +268,8 @@ private void onApplicationCreated(FlowRunRowKey flowRunRowKey, private void storeAppCreatedInFlowRunTable(FlowRunRowKey flowRunRowKey, String appId, TimelineEntity te) throws IOException { byte[] rowKey = flowRunRowKey.getRowKey(); - FlowRunColumn.MIN_START_TIME.store(rowKey, flowRunTable, null, - te.getCreatedTime(), + ColumnRWHelper.store(rowKey, flowRunTable, FlowRunColumn.MIN_START_TIME, + null, te.getCreatedTime(), AggregationCompactionDimension.APPLICATION_ID.getAttribute(appId)); } @@ -282,8 +291,8 @@ private void onApplicationFinished(FlowRunRowKey flowRunRowKey, appFinishedTimeStamp, flowRunRowKey.getUserId(), flowRunRowKey.getFlowName()).getRowKey(); byte[] qualifier = longKeyConverter.encode(flowRunRowKey.getFlowRunId()); - FlowActivityColumnPrefix.RUN_ID.store(rowKey, flowActivityTable, qualifier, - null, flowVersion, + ColumnRWHelper.store(rowKey, flowActivityTable, + FlowActivityColumnPrefix.RUN_ID, qualifier, null, flowVersion, AggregationCompactionDimension.APPLICATION_ID.getAttribute(appId)); } @@ -296,8 +305,8 @@ private void storeAppFinishedInFlowRunTable(FlowRunRowKey flowRunRowKey, byte[] rowKey = flowRunRowKey.getRowKey(); Attribute attributeAppId = 
AggregationCompactionDimension.APPLICATION_ID.getAttribute(appId); - FlowRunColumn.MAX_END_TIME.store(rowKey, flowRunTable, null, - appFinishedTimeStamp, attributeAppId); + ColumnRWHelper.store(rowKey, flowRunTable, FlowRunColumn.MAX_END_TIME, + null, appFinishedTimeStamp, attributeAppId); // store the final value of metrics since application has finished Set metrics = te.getMetrics(); @@ -328,7 +337,7 @@ private void storeFlowMetrics(byte[] rowKey, Set metrics, Map timeseries = metric.getValues(); for (Map.Entry timeseriesEntry : timeseries.entrySet()) { Long timestamp = timeseriesEntry.getKey(); - FlowRunColumnPrefix.METRIC.store(rowKey, flowRunTable, + ColumnRWHelper.store(rowKey, flowRunTable, FlowRunColumnPrefix.METRIC, metricColumnQualifier, timestamp, timeseriesEntry.getValue(), attributes); } @@ -338,7 +347,7 @@ private void storeFlowMetrics(byte[] rowKey, Set metrics, /** * Stores the Relations from the {@linkplain TimelineEntity} object. */ - private void storeRelations(byte[] rowKey, + private > void storeRelations(byte[] rowKey, Map> connectedEntities, ColumnPrefix columnPrefix, TypedBufferedMutator table) throws IOException { if (connectedEntities != null) { @@ -347,9 +356,9 @@ private void storeRelations(byte[] rowKey, // id3?id4?id5 String compoundValue = Separator.VALUES.joinEncoded(connectedEntity.getValue()); - columnPrefix.store(rowKey, table, - stringKeyConverter.encode(connectedEntity.getKey()), null, - compoundValue); + ColumnRWHelper.store(rowKey, table, columnPrefix, + stringKeyConverter.encode(connectedEntity.getKey()), + null, compoundValue); } } } @@ -362,11 +371,12 @@ private void store(byte[] rowKey, TimelineEntity te, Tables table) throws IOException { switch (table) { case APPLICATION_TABLE: - ApplicationColumn.ID.store(rowKey, applicationTable, null, te.getId()); - ApplicationColumn.CREATED_TIME.store(rowKey, applicationTable, null, - te.getCreatedTime()); - ApplicationColumn.FLOW_VERSION.store(rowKey, applicationTable, null, - flowVersion); + ColumnRWHelper.store(rowKey, applicationTable, + ApplicationColumn.ID, null, te.getId()); + ColumnRWHelper.store(rowKey, applicationTable, + ApplicationColumn.CREATED_TIME, null, te.getCreatedTime()); + ColumnRWHelper.store(rowKey, applicationTable, + ApplicationColumn.FLOW_VERSION, null, flowVersion); storeInfo(rowKey, te.getInfo(), flowVersion, ApplicationColumnPrefix.INFO, applicationTable); storeMetrics(rowKey, te.getMetrics(), ApplicationColumnPrefix.METRIC, @@ -381,11 +391,14 @@ private void store(byte[] rowKey, TimelineEntity te, ApplicationColumnPrefix.RELATES_TO, applicationTable); break; case ENTITY_TABLE: - EntityColumn.ID.store(rowKey, entityTable, null, te.getId()); - EntityColumn.TYPE.store(rowKey, entityTable, null, te.getType()); - EntityColumn.CREATED_TIME.store(rowKey, entityTable, null, - te.getCreatedTime()); - EntityColumn.FLOW_VERSION.store(rowKey, entityTable, null, flowVersion); + ColumnRWHelper.store(rowKey, entityTable, + EntityColumn.ID, null, te.getId()); + ColumnRWHelper.store(rowKey, entityTable, + EntityColumn.TYPE, null, te.getType()); + ColumnRWHelper.store(rowKey, entityTable, + EntityColumn.CREATED_TIME, null, te.getCreatedTime()); + ColumnRWHelper.store(rowKey, entityTable, + EntityColumn.FLOW_VERSION, null, flowVersion); storeInfo(rowKey, te.getInfo(), flowVersion, EntityColumnPrefix.INFO, entityTable); storeMetrics(rowKey, te.getMetrics(), EntityColumnPrefix.METRIC, @@ -400,14 +413,14 @@ private void store(byte[] rowKey, TimelineEntity te, EntityColumnPrefix.RELATES_TO, 
entityTable); break; case SUBAPPLICATION_TABLE: - SubApplicationColumn.ID.store(rowKey, subApplicationTable, null, - te.getId()); - SubApplicationColumn.TYPE.store(rowKey, subApplicationTable, null, - te.getType()); - SubApplicationColumn.CREATED_TIME.store(rowKey, subApplicationTable, null, - te.getCreatedTime()); - SubApplicationColumn.FLOW_VERSION.store(rowKey, subApplicationTable, null, - flowVersion); + ColumnRWHelper.store(rowKey, subApplicationTable, SubApplicationColumn.ID, + null, te.getId()); + ColumnRWHelper.store(rowKey, subApplicationTable, + SubApplicationColumn.TYPE, null, te.getType()); + ColumnRWHelper.store(rowKey, subApplicationTable, + SubApplicationColumn.CREATED_TIME, null, te.getCreatedTime()); + ColumnRWHelper.store(rowKey, subApplicationTable, + SubApplicationColumn.FLOW_VERSION, null, flowVersion); storeInfo(rowKey, te.getInfo(), flowVersion, SubApplicationColumnPrefix.INFO, subApplicationTable); storeMetrics(rowKey, te.getMetrics(), SubApplicationColumnPrefix.METRIC, @@ -430,12 +443,13 @@ private void store(byte[] rowKey, TimelineEntity te, /** * stores the info information from {@linkplain TimelineEntity}. */ - private void storeInfo(byte[] rowKey, Map info, - String flowVersion, ColumnPrefix columnPrefix, - TypedBufferedMutator table) throws IOException { + private > void storeInfo(byte[] rowKey, + Map info, String flowVersion, + ColumnPrefix columnPrefix, TypedBufferedMutator table) + throws IOException { if (info != null) { for (Map.Entry entry : info.entrySet()) { - columnPrefix.store(rowKey, table, + ColumnRWHelper.store(rowKey, table, columnPrefix, stringKeyConverter.encode(entry.getKey()), null, entry.getValue()); } } @@ -444,13 +458,15 @@ private void storeInfo(byte[] rowKey, Map info, /** * stores the config information from {@linkplain TimelineEntity}. */ - private void storeConfig(byte[] rowKey, Map config, + private > void storeConfig( + byte[] rowKey, Map config, ColumnPrefix columnPrefix, TypedBufferedMutator table) throws IOException { if (config != null) { for (Map.Entry entry : config.entrySet()) { byte[] configKey = stringKeyConverter.encode(entry.getKey()); - columnPrefix.store(rowKey, table, configKey, null, entry.getValue()); + ColumnRWHelper.store(rowKey, table, columnPrefix, configKey, + null, entry.getValue()); } } } @@ -459,7 +475,8 @@ private void storeConfig(byte[] rowKey, Map config, * stores the {@linkplain TimelineMetric} information from the * {@linkplain TimelineEvent} object. */ - private void storeMetrics(byte[] rowKey, Set metrics, + private > void storeMetrics( + byte[] rowKey, Set metrics, ColumnPrefix columnPrefix, TypedBufferedMutator table) throws IOException { if (metrics != null) { @@ -469,8 +486,8 @@ private void storeMetrics(byte[] rowKey, Set metrics, Map timeseries = metric.getValues(); for (Map.Entry timeseriesEntry : timeseries.entrySet()) { Long timestamp = timeseriesEntry.getKey(); - columnPrefix.store(rowKey, table, metricColumnQualifier, timestamp, - timeseriesEntry.getValue()); + ColumnRWHelper.store(rowKey, table, columnPrefix, + metricColumnQualifier, timestamp, timeseriesEntry.getValue()); } } } @@ -479,7 +496,8 @@ private void storeMetrics(byte[] rowKey, Set metrics, /** * Stores the events from the {@linkplain TimelineEvent} object. 
*/ - private void storeEvents(byte[] rowKey, Set events, + private > void storeEvents( + byte[] rowKey, Set events, ColumnPrefix columnPrefix, TypedBufferedMutator table) throws IOException { if (events != null) { @@ -499,16 +517,16 @@ private void storeEvents(byte[] rowKey, Set events, byte[] columnQualifierBytes = new EventColumnName(eventId, eventTimestamp, null) .getColumnQualifier(); - columnPrefix.store(rowKey, table, columnQualifierBytes, null, - Separator.EMPTY_BYTES); + ColumnRWHelper.store(rowKey, table, columnPrefix, + columnQualifierBytes, null, Separator.EMPTY_BYTES); } else { for (Map.Entry info : eventInfo.entrySet()) { // eventId=infoKey byte[] columnQualifierBytes = new EventColumnName(eventId, eventTimestamp, info.getKey()) .getColumnQualifier(); - columnPrefix.store(rowKey, table, columnQualifierBytes, null, - info.getValue()); + ColumnRWHelper.store(rowKey, table, columnPrefix, + columnQualifierBytes, null, info.getValue()); } // for info: eventInfo } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java similarity index 92% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java index c9f7cecdf5..e9e4770729 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java @@ -37,13 +37,13 @@ import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; -import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTableRW; +import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW; import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; @@ -94,26 +94,27 @@ public static void main(String[] args) throws Exception { String entityTableName = commandLine.getOptionValue( ENTITY_TABLE_NAME_SHORT); if (StringUtils.isNotBlank(entityTableName)) { - hbaseConf.set(EntityTable.TABLE_NAME_CONF_NAME, entityTableName); + hbaseConf.set(EntityTableRW.TABLE_NAME_CONF_NAME, entityTableName); } // Grab the entity metrics TTL String entityTableMetricsTTL = commandLine.getOptionValue( ENTITY_METRICS_TTL_OPTION_SHORT); if (StringUtils.isNotBlank(entityTableMetricsTTL)) { int entityMetricsTTL = Integer.parseInt(entityTableMetricsTTL); - new EntityTable().setMetricsTTL(entityMetricsTTL, hbaseConf); + new EntityTableRW().setMetricsTTL(entityMetricsTTL, hbaseConf); } // Grab the appToflowTableName argument String appToflowTableName = commandLine.getOptionValue( APP_TO_FLOW_TABLE_NAME_SHORT); if (StringUtils.isNotBlank(appToflowTableName)) { - hbaseConf.set(AppToFlowTable.TABLE_NAME_CONF_NAME, appToflowTableName); + hbaseConf.set( + AppToFlowTableRW.TABLE_NAME_CONF_NAME, appToflowTableName); } // Grab the applicationTableName argument String applicationTableName = commandLine.getOptionValue( APP_TABLE_NAME_SHORT); if (StringUtils.isNotBlank(applicationTableName)) { - hbaseConf.set(ApplicationTable.TABLE_NAME_CONF_NAME, + hbaseConf.set(ApplicationTableRW.TABLE_NAME_CONF_NAME, applicationTableName); } // Grab the application metrics TTL @@ -121,14 +122,14 @@ public static void main(String[] args) throws Exception { APP_METRICS_TTL_OPTION_SHORT); if (StringUtils.isNotBlank(applicationTableMetricsTTL)) { int appMetricsTTL = Integer.parseInt(applicationTableMetricsTTL); - new ApplicationTable().setMetricsTTL(appMetricsTTL, hbaseConf); + new ApplicationTableRW().setMetricsTTL(appMetricsTTL, hbaseConf); } // Grab the subApplicationTableName argument String subApplicationTableName = commandLine.getOptionValue( SUB_APP_TABLE_NAME_SHORT); if (StringUtils.isNotBlank(subApplicationTableName)) { - hbaseConf.set(SubApplicationTable.TABLE_NAME_CONF_NAME, + hbaseConf.set(SubApplicationTableRW.TABLE_NAME_CONF_NAME, subApplicationTableName); } // Grab the subApplication metrics TTL @@ -136,7 +137,7 @@ public static void main(String[] args) throws Exception { .getOptionValue(SUB_APP_METRICS_TTL_OPTION_SHORT); if (StringUtils.isNotBlank(subApplicationTableMetricsTTL)) { int subAppMetricsTTL = Integer.parseInt(subApplicationTableMetricsTTL); - new SubApplicationTable().setMetricsTTL(subAppMetricsTTL, hbaseConf); + new SubApplicationTableRW().setMetricsTTL(subAppMetricsTTL, hbaseConf); } // create all table schemas in hbase @@ -303,7 +304,7 @@ public static void createAllTables(Configuration hbaseConf, throw new IOException("Cannot create table since admin is null"); } try { - new EntityTable().createTable(admin, hbaseConf); + new EntityTableRW().createTable(admin, hbaseConf); } catch (IOException e) { if (skipExisting) { LOG.warn("Skip and continue on: " + e.getMessage()); @@ -312,7 +313,7 @@ public static void createAllTables(Configuration hbaseConf, } } try { - new AppToFlowTable().createTable(admin, hbaseConf); + new AppToFlowTableRW().createTable(admin, hbaseConf); } catch (IOException e) { if (skipExisting) { LOG.warn("Skip and continue on: " + e.getMessage()); @@ -321,7 +322,7 @@ public static void 
createAllTables(Configuration hbaseConf, } } try { - new ApplicationTable().createTable(admin, hbaseConf); + new ApplicationTableRW().createTable(admin, hbaseConf); } catch (IOException e) { if (skipExisting) { LOG.warn("Skip and continue on: " + e.getMessage()); @@ -330,7 +331,7 @@ public static void createAllTables(Configuration hbaseConf, } } try { - new FlowRunTable().createTable(admin, hbaseConf); + new FlowRunTableRW().createTable(admin, hbaseConf); } catch (IOException e) { if (skipExisting) { LOG.warn("Skip and continue on: " + e.getMessage()); @@ -339,7 +340,7 @@ public static void createAllTables(Configuration hbaseConf, } } try { - new FlowActivityTable().createTable(admin, hbaseConf); + new FlowActivityTableRW().createTable(admin, hbaseConf); } catch (IOException e) { if (skipExisting) { LOG.warn("Skip and continue on: " + e.getMessage()); @@ -348,7 +349,7 @@ public static void createAllTables(Configuration hbaseConf, } } try { - new SubApplicationTable().createTable(admin, hbaseConf); + new SubApplicationTableRW().createTable(admin, hbaseConf); } catch (IOException e) { if (skipExisting) { LOG.warn("Skip and continue on: " + e.getMessage()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTableRW.java similarity index 66% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTableRW.java index 4da720e7a5..808994e3b0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTableRW.java @@ -26,48 +26,15 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * The application table as column families info, config and metrics. Info - * stores information about a YARN application entity, config stores - * configuration data of a YARN application, metrics stores the metrics of a - * YARN application. This table is entirely analogous to the entity table but - * created for better performance. - * - * Example application table record: - * - *
- * |-------------------------------------------------------------------------|
- * |  Row       | Column Family                | Column Family| Column Family|
- * |  key       | info                         | metrics      | config       |
- * |-------------------------------------------------------------------------|
- * | clusterId! | id:appId                     | metricId1:   | configKey1:  |
- * | userName!  |                              | metricValue1 | configValue1 |
- * | flowName!  | created_time:                | @timestamp1  |              |
- * | flowRunId! | 1392993084018                |              | configKey2:  |
- * | AppId      |                              | metriciD1:   | configValue2 |
- * |            | i!infoKey:                   | metricValue2 |              |
- * |            | infoValue                    | @timestamp2  |              |
- * |            |                              |              |              |
- * |            | r!relatesToKey:              | metricId2:   |              |
- * |            | id3=id4=id5                  | metricValue1 |              |
- * |            |                              | @timestamp2  |              |
- * |            | s!isRelatedToKey:            |              |              |
- * |            | id7=id9=id6                  |              |              |
- * |            |                              |              |              |
- * |            | e!eventId=timestamp=infoKey: |              |              |
- * |            | eventInfoValue               |              |              |
- * |            |                              |              |              |
- * |            | flowVersion:                 |              |              |
- * |            | versionValue                 |              |              |
- * |-------------------------------------------------------------------------|
- * 
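The example-record layout above is dropped from the client-side class: after the split, ApplicationTableRW in the -client submodule is only responsible for creating and writing the table. A hedged sketch of standalone table creation using only the API surface visible in this patch (TABLE_NAME_CONF_NAME, setMetricsTTL and createTable); the table name and TTL values are examples, not defaults taken from the source.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;

    final class CreateApplicationTableSketch {

      static void createApplicationTable(Configuration yarnConf) throws IOException {
        // Resolve the effective HBase configuration the same way TimelineSchemaCreator does.
        Configuration hbaseConf =
            HBaseTimelineStorageUtils.getTimelineServiceHBaseConf(yarnConf);

        // Optional overrides, equivalent to the schema creator's table-name and
        // metrics-TTL command-line options (values below are examples only).
        hbaseConf.set(ApplicationTableRW.TABLE_NAME_CONF_NAME,
            "prod.timelineservice.application");
        ApplicationTableRW applicationTable = new ApplicationTableRW();
        applicationTable.setMetricsTTL(30 * 24 * 60 * 60, hbaseConf);

        try (Connection conn = ConnectionFactory.createConnection(hbaseConf);
             Admin admin = conn.getAdmin()) {
          applicationTable.createTable(admin, hbaseConf);
        }
      }
    }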
+ * Create, read and write to the Application Table. */ -public class ApplicationTable extends BaseTable { +public class ApplicationTableRW extends BaseTableRW { /** application prefix. */ private static final String PREFIX = YarnConfiguration.TIMELINE_SERVICE_PREFIX + "application"; @@ -100,9 +67,9 @@ public class ApplicationTable extends BaseTable { private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000; private static final Logger LOG = - LoggerFactory.getLogger(ApplicationTable.class); + LoggerFactory.getLogger(ApplicationTableRW.class); - public ApplicationTable() { + public ApplicationTableRW() { super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME); } @@ -110,8 +77,8 @@ public ApplicationTable() { * (non-Javadoc) * * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable - * (org.apache.hadoop.hbase.client.Admin, + * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW# + * createTable(org.apache.hadoop.hbase.client.Admin, * org.apache.hadoop.conf.Configuration) */ public void createTable(Admin admin, Configuration hbaseConf) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTableRW.java similarity index 67% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTableRW.java index 04da5c70d4..6460203361 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTableRW.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.regionserver.BloomType; import 
org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,42 +33,9 @@ import java.io.IOException; /** - * The app_flow table as column families mapping. Mapping stores - * appId to flowName and flowRunId mapping information - * - * Example app_flow table record: - * - *
- * |--------------------------------------|
- * |  Row       | Column Family           |
- * |  key       | mapping                 |
- * |--------------------------------------|
- * | appId      | flow_name!cluster1:     |
- * |            | foo@daily_hive_report   |
- * |            |                         |
- * |            | flow_run_id!cluster1:   |
- * |            | 1452828720457           |
- * |            |                         |
- * |            | user_id!cluster1:       |
- * |            | admin                   |
- * |            |                         |
- * |            | flow_name!cluster2:     |
- * |            | bar@ad_hoc_query        |
- * |            |                         |
- * |            | flow_run_id!cluster2:   |
- * |            | 1452828498752           |
- * |            |                         |
- * |            | user_id!cluster2:       |
- * |            | joe                     |
- * |            |                         |
- * |--------------------------------------|
- * 
- * - * It is possible (although unlikely) in a multi-cluster environment that there - * may be more than one applications for a given app id. Different clusters are - * recorded as different sets of columns. + * Create, read and write to the AppToFlow Table. */ -public class AppToFlowTable extends BaseTable { +public class AppToFlowTableRW extends BaseTableRW { /** app_flow prefix. */ private static final String PREFIX = YarnConfiguration.TIMELINE_SERVICE_PREFIX + "app-flow"; @@ -80,9 +47,9 @@ public class AppToFlowTable extends BaseTable { private static final String DEFAULT_TABLE_NAME = "timelineservice.app_flow"; private static final Logger LOG = - LoggerFactory.getLogger(AppToFlowTable.class); + LoggerFactory.getLogger(AppToFlowTableRW.class); - public AppToFlowTable() { + public AppToFlowTableRW() { super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME); } @@ -90,8 +57,8 @@ public AppToFlowTable() { * (non-Javadoc) * * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable - * (org.apache.hadoop.hbase.client.Admin, + * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW# + * createTable(org.apache.hadoop.hbase.client.Admin, * org.apache.hadoop.conf.Configuration) */ public void createTable(Admin admin, Configuration hbaseConf) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTableRW.java similarity index 96% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTableRW.java index 93d809c003..12ebce44a4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTableRW.java @@ -37,7 
+37,7 @@ * * @param reference to the table instance class itself for type safety. */ -public abstract class BaseTable { +public abstract class BaseTableRW> { /** * Name of config variable that is used to point to this table. @@ -56,7 +56,7 @@ public abstract class BaseTable { * @param defaultTableName Default table name if table from config is not * found. */ - protected BaseTable(String tableNameConfName, String defaultTableName) { + protected BaseTableRW(String tableNameConfName, String defaultTableName) { this.tableNameConfName = tableNameConfName; this.defaultTableName = defaultTableName; } @@ -82,7 +82,7 @@ public TypedBufferedMutator getTableMutator(Configuration hbaseConf, // This is how service initialization should hang on to this variable, with // the proper type TypedBufferedMutator table = - new BufferedMutatorDelegator(bufferedMutator); + new TypedBufferedMutator(bufferedMutator); return table; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnRWHelper.java similarity index 51% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnRWHelper.java index 9f95d44527..a8e5149c3a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnRWHelper.java @@ -15,14 +15,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.yarn.server.timelineservice.storage.common; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.NavigableMap; -import java.util.TreeMap; +package org.apache.hadoop.yarn.server.timelineservice.storage.common; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -32,109 +26,37 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; + /** - * This class is meant to be used only by explicit Columns, and not directly to - * write by clients. - * - * @param refers to the table. + * A set of utility functions that read or read to a column. + * This class is meant to be used only by explicit Columns, + * and not directly to write by clients. 
*/ -public class ColumnHelper { +public final class ColumnRWHelper { private static final Logger LOG = LoggerFactory.getLogger(ColumnHelper.class); - private final ColumnFamily columnFamily; - - /** - * Local copy of bytes representation of columnFamily so that we can avoid - * cloning a new copy over and over. - */ - private final byte[] columnFamilyBytes; - - private final ValueConverter converter; - - private final boolean supplementTs; - - public ColumnHelper(ColumnFamily columnFamily) { - this(columnFamily, GenericConverter.getInstance()); - } - - public ColumnHelper(ColumnFamily columnFamily, ValueConverter converter) { - this(columnFamily, converter, false); + private ColumnRWHelper() { } /** - * @param columnFamily column family implementation. - * @param converter converter use to encode/decode values stored in the column - * or column prefix. - * @param needSupplementTs flag to indicate if cell timestamp needs to be - * modified for this column by calling - * {@link TimestampGenerator#getSupplementedTimestamp(long, String)}. This - * would be required for columns(such as metrics in flow run table) where - * potential collisions can occur due to same timestamp. - */ - public ColumnHelper(ColumnFamily columnFamily, ValueConverter converter, - boolean needSupplementTs) { - this.columnFamily = columnFamily; - columnFamilyBytes = columnFamily.getBytes(); - if (converter == null) { - this.converter = GenericConverter.getInstance(); - } else { - this.converter = converter; - } - this.supplementTs = needSupplementTs; - } - - /** - * Sends a Mutation to the table. The mutations will be buffered and sent over - * the wire as part of a batch. - * - * @param rowKey - * identifying the row to write. Nothing gets written when null. - * @param tableMutator - * used to modify the underlying HBase table - * @param columnQualifier - * column qualifier. Nothing gets written when null. - * @param timestamp - * version timestamp. When null the current timestamp multiplied with - * TimestampGenerator.TS_MULTIPLIER and added with last 3 digits of - * app id will be used - * @param inputValue - * the value to write to the rowKey and column qualifier. Nothing - * gets written when null. - * @param attributes Attributes to be set for HBase Put. - * @throws IOException if any problem occurs during store operation(sending - * mutation to table). - */ - public void store(byte[] rowKey, TypedBufferedMutator tableMutator, - byte[] columnQualifier, Long timestamp, Object inputValue, - Attribute... attributes) throws IOException { - if ((rowKey == null) || (columnQualifier == null) || (inputValue == null)) { - return; - } - Put p = new Put(rowKey); - timestamp = getPutTimestamp(timestamp, attributes); - p.addColumn(columnFamilyBytes, columnQualifier, timestamp, - converter.encodeValue(inputValue)); - if ((attributes != null) && (attributes.length > 0)) { - for (Attribute attribute : attributes) { - p.setAttribute(attribute.getName(), attribute.getValue()); - } - } - tableMutator.mutate(p); - } - - /* * Figures out the cell timestamp used in the Put For storing. * Will supplement the timestamp if required. Typically done for flow run * table.If we supplement the timestamp, we left shift the timestamp and * supplement it with the AppId id so that there are no collisions in the flow * run table's cells. 
*/ - private long getPutTimestamp(Long timestamp, Attribute[] attributes) { + private static long getPutTimestamp( + Long timestamp, boolean supplementTs, Attribute[] attributes) { if (timestamp == null) { timestamp = System.currentTimeMillis(); } - if (!this.supplementTs) { + if (!supplementTs) { return timestamp; } else { String appId = getAppIdFromAttributes(attributes); @@ -144,7 +66,7 @@ private long getPutTimestamp(Long timestamp, Attribute[] attributes) { } } - private String getAppIdFromAttributes(Attribute[] attributes) { + private static String getAppIdFromAttributes(Attribute[] attributes) { if (attributes == null) { return null; } @@ -159,10 +81,76 @@ private String getAppIdFromAttributes(Attribute[] attributes) { } /** - * @return the column family for this column implementation. + * Sends a Mutation to the table. The mutations will be buffered and sent over + * the wire as part of a batch. + * + * @param rowKey + * identifying the row to write. Nothing gets written when null. + * @param tableMutator + * used to modify the underlying HBase table + * @param column the column that is to be modified + * @param timestamp + * version timestamp. When null the current timestamp multiplied with + * TimestampGenerator.TS_MULTIPLIER and added with last 3 digits of + * app id will be used + * @param inputValue + * the value to write to the rowKey and column qualifier. Nothing + * gets written when null. + * @param attributes Attributes to be set for HBase Put. + * @throws IOException if any problem occurs during store operation(sending + * mutation to table). */ - public ColumnFamily getColumnFamily() { - return columnFamily; + public static void store(byte[] rowKey, TypedBufferedMutator tableMutator, + Column column, Long timestamp, + Object inputValue, Attribute... attributes) + throws IOException { + store(rowKey, tableMutator, column.getColumnFamilyBytes(), + column.getColumnQualifierBytes(), timestamp, + column.supplementCellTimestamp(), inputValue, + column.getValueConverter(), + column.getCombinedAttrsWithAggr(attributes)); + } + + /** + * Sends a Mutation to the table. The mutations will be buffered and sent over + * the wire as part of a batch. + * + * @param rowKey + * identifying the row to write. Nothing gets written when null. + * @param tableMutator + * used to modify the underlying HBase table + * @param columnFamilyBytes + * @param columnQualifier + * column qualifier. Nothing gets written when null. + * @param timestamp + * version timestamp. When null the current timestamp multiplied with + * TimestampGenerator.TS_MULTIPLIER and added with last 3 digits of + * app id will be used + * @param inputValue + * the value to write to the rowKey and column qualifier. Nothing + * gets written when null. + * @param converter + * @param attributes Attributes to be set for HBase Put. + * @throws IOException if any problem occurs during store operation(sending + * mutation to table). + */ + public static void store(byte[] rowKey, TypedBufferedMutator tableMutator, + byte[] columnFamilyBytes, byte[] columnQualifier, Long timestamp, + boolean supplementTs, Object inputValue, ValueConverter converter, + Attribute... 
attributes) throws IOException { + if ((rowKey == null) || (columnQualifier == null) || (inputValue == null)) { + return; + } + Put p = new Put(rowKey); + timestamp = getPutTimestamp(timestamp, supplementTs, attributes); + p.addColumn(columnFamilyBytes, columnQualifier, timestamp, + converter.encodeValue(inputValue)); + if ((attributes != null) && (attributes.length > 0)) { + for (Attribute attribute : attributes) { + p.setAttribute(attribute.getName(), attribute.getValue()); + } + } + tableMutator.mutate(p); } /** @@ -170,12 +158,15 @@ public ColumnFamily getColumnFamily() { * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}. * * @param result from which to read the value. Cannot be null + * @param columnFamilyBytes * @param columnQualifierBytes referring to the column to be read. + * @param converter * @return latest version of the specified column of whichever object was * written. * @throws IOException if any problem occurs while reading result. */ - public Object readResult(Result result, byte[] columnQualifierBytes) + public static Object readResult(Result result, byte[] columnFamilyBytes, + byte[] columnQualifierBytes, ValueConverter converter) throws IOException { if (result == null || columnQualifierBytes == null) { return null; @@ -188,6 +179,87 @@ public Object readResult(Result result, byte[] columnQualifierBytes) return converter.decodeValue(value); } + /** + * Get the latest version of this specified column. Note: this call clones the + * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}. + * + * @param result from which to read the value. Cannot be null + * @param column the column that the result can be parsed to + * @return latest version of the specified column of whichever object was + * written. + * @throws IOException if any problem occurs while reading result. + */ + public static Object readResult(Result result, Column column) + throws IOException { + return readResult(result, column.getColumnFamilyBytes(), + column.getColumnQualifierBytes(), column.getValueConverter()); + } + + /** + * Get the latest version of this specified column. Note: this call clones the + * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}. + * + * @param result Cannot be null + * @param columnPrefix column prefix to read from + * @param qualifier column qualifier. Nothing gets read when null. + * @return result object (can be cast to whatever object was written to) or + * null when specified column qualifier for this prefix doesn't exist + * in the result. + * @throws IOException if there is any exception encountered while reading + * result. + */ + public static Object readResult(Result result, ColumnPrefix columnPrefix, + String qualifier) throws IOException { + byte[] columnQualifier = ColumnHelper.getColumnQualifier( + columnPrefix.getColumnPrefixInBytes(), qualifier); + + return readResult( + result, columnPrefix.getColumnFamilyBytes(), + columnQualifier, columnPrefix.getValueConverter()); + } + + /** + * + * @param identifies the type of key converter. + * @param result from which to read columns. + * @param keyConverter used to convert column bytes to the appropriate key + * type + * @return the latest values of columns in the column family with this prefix + * (or all of them if the prefix value is null). + * @throws IOException if there is any exception encountered while reading + * results. 
+ */ + public static Map readResults(Result result, + ColumnPrefix columnPrefix, KeyConverter keyConverter) + throws IOException { + return readResults(result, + columnPrefix.getColumnFamilyBytes(), + columnPrefix.getColumnPrefixInBytes(), + keyConverter, columnPrefix.getValueConverter()); + } + + /** + * @param result from which to reads data with timestamps. + * @param identifies the type of key converter. + * @param the type of the values. The values will be cast into that type. + * @param keyConverter used to convert column bytes to the appropriate key + * type. + * @return the cell values at each respective time in for form + * {@literal {idA={timestamp1->value1}, idA={timestamp2->value2}, + * idB={timestamp3->value3}, idC={timestamp1->value4}}} + * @throws IOException if there is any exception encountered while reading + * result. + */ + public static NavigableMap> + readResultsWithTimestamps(Result result, ColumnPrefix columnPrefix, + KeyConverter keyConverter) throws IOException { + return readResultsWithTimestamps(result, + columnPrefix.getColumnFamilyBytes(), + columnPrefix.getColumnPrefixInBytes(), + keyConverter, columnPrefix.getValueConverter(), + columnPrefix.supplementCellTimeStamp()); + } + /** * @param result from which to reads data with timestamps * @param columnPrefixBytes optional prefix to limit columns. If null all @@ -203,22 +275,24 @@ public Object readResult(Result result, byte[] columnQualifierBytes) * @throws IOException if any problem occurs while reading results. */ @SuppressWarnings("unchecked") - public NavigableMap> - readResultsWithTimestamps(Result result, byte[] columnPrefixBytes, - KeyConverter keyConverter) throws IOException { + public static NavigableMap> + readResultsWithTimestamps(Result result, byte[] columnFamilyBytes, + byte[] columnPrefixBytes, KeyConverter keyConverter, + ValueConverter valueConverter, boolean supplementTs) + throws IOException { NavigableMap> results = new TreeMap<>(); if (result != null) { NavigableMap< byte[], NavigableMap>> resultMap = - result.getMap(); + result.getMap(); NavigableMap> columnCellMap = resultMap.get(columnFamilyBytes); // could be that there is no such column family. if (columnCellMap != null) { - for (Entry> entry : columnCellMap + for (Map.Entry> entry : columnCellMap .entrySet()) { K converterColumnKey = null; if (columnPrefixBytes == null) { @@ -255,9 +329,9 @@ public Object readResult(Result result, byte[] columnQualifierBytes) new TreeMap(); NavigableMap cells = entry.getValue(); if (cells != null) { - for (Entry cell : cells.entrySet()) { + for (Map.Entry cell : cells.entrySet()) { V value = - (V) converter.decodeValue(cell.getValue()); + (V) valueConverter.decodeValue(cell.getValue()); Long ts = supplementTs ? TimestampGenerator. getTruncatedTimestamp(cell.getKey()) : cell.getKey(); cellResults.put(ts, value); @@ -286,14 +360,15 @@ public Object readResult(Result result, byte[] columnQualifierBytes) * returning byte arrays of values that were not Strings. * @throws IOException if any problem occurs while reading results. 
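The two static overloads above give readers a prefix-oriented entry point that bundles the column family bytes, prefix bytes, key converter and value converter that previously lived inside a ColumnHelper instance. A sketch of how a reader might call them; the generic bounds follow the BaseTable pattern in this patch, and the exact type parameters of the helpers are an assumption since they are not visible in this excerpt.

    import java.io.IOException;
    import java.util.Map;
    import java.util.NavigableMap;

    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter;

    final class PrefixReadSketch {

      private static final StringKeyConverter STRING_KEY_CONVERTER = new StringKeyConverter();

      // Latest value per qualifier under the prefix, e.g. the info columns of a row.
      static <T extends BaseTable<T>> Map<String, Object> readInfo(Result row,
          ColumnPrefix<T> infoPrefix) throws IOException {
        return ColumnRWHelper.readResults(row, infoPrefix, STRING_KEY_CONVERTER);
      }

      // Full time series per qualifier, e.g. metric values keyed by cell timestamp.
      static <T extends BaseTable<T>> NavigableMap<String, NavigableMap<Long, Object>>
          readTimeSeries(Result row, ColumnPrefix<T> metricPrefix) throws IOException {
        return ColumnRWHelper.readResultsWithTimestamps(row, metricPrefix,
            STRING_KEY_CONVERTER);
      }
    }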
*/ - public Map readResults(Result result, - byte[] columnPrefixBytes, KeyConverter keyConverter) + public static Map readResults(Result result, + byte[] columnFamilyBytes, byte[] columnPrefixBytes, + KeyConverter keyConverter, ValueConverter valueConverter) throws IOException { Map results = new HashMap(); if (result != null) { Map columns = result.getFamilyMap(columnFamilyBytes); - for (Entry entry : columns.entrySet()) { + for (Map.Entry entry : columns.entrySet()) { byte[] columnKey = entry.getKey(); if (columnKey != null && columnKey.length > 0) { @@ -327,7 +402,7 @@ public Map readResults(Result result, // If the columnPrefix is null (we want all columns), or the actual // prefix matches the given prefix we want this column if (converterColumnKey != null) { - Object value = converter.decodeValue(entry.getValue()); + Object value = valueConverter.decodeValue(entry.getValue()); // we return the columnQualifier in parts since we don't know // which part is of which data type. results.put(converterColumnKey, value); @@ -339,76 +414,74 @@ public Map readResults(Result result, } /** - * @param columnPrefixBytes The byte representation for the column prefix. - * Should not contain {@link Separator#QUALIFIERS}. - * @param qualifier for the remainder of the column. - * {@link Separator#QUALIFIERS} is permissible in the qualifier - * as it is joined only with the column prefix bytes. - * @return fully sanitized column qualifier that is a combination of prefix - * and qualifier. If prefix is null, the result is simply the encoded - * qualifier without any separator. + * Sends a Mutation to the table. The mutations will be buffered and sent over + * the wire as part of a batch. + * + * @param rowKey identifying the row to write. Nothing gets written when null. + * @param tableMutator used to modify the underlying HBase table. Caller is + * responsible to pass a mutator for the table that actually has this + * column. + * @param qualifier column qualifier. Nothing gets written when null. + * @param timestamp version timestamp. When null the server timestamp will be + * used. + * @param attributes attributes for the mutation that are used by the + * coprocessor to set/read the cell tags. + * @param inputValue the value to write to the rowKey and column qualifier. + * Nothing gets written when null. + * @throws IOException if there is any exception encountered while doing + * store operation(sending mutation to the table). */ - public static byte[] getColumnQualifier(byte[] columnPrefixBytes, - String qualifier) { - - // We don't want column names to have spaces / tabs. - byte[] encodedQualifier = - Separator.encode(qualifier, Separator.SPACE, Separator.TAB); - if (columnPrefixBytes == null) { - return encodedQualifier; + public static void store(byte[] rowKey, TypedBufferedMutator tableMutator, + ColumnPrefix columnPrefix, byte[] qualifier, Long timestamp, + Object inputValue, Attribute... attributes) throws IOException { + // Null check + if (qualifier == null) { + throw new IOException("Cannot store column with null qualifier in " + +tableMutator.getName().getNameAsString()); } - // Convert qualifier to lower case, strip of separators and tag on column - // prefix. 
- byte[] columnQualifier = - Separator.QUALIFIERS.join(columnPrefixBytes, encodedQualifier); - return columnQualifier; + byte[] columnQualifier = columnPrefix.getColumnPrefixBytes(qualifier); + Attribute[] combinedAttributes = + columnPrefix.getCombinedAttrsWithAggr(attributes); + + store(rowKey, tableMutator, columnPrefix.getColumnFamilyBytes(), + columnQualifier, timestamp, columnPrefix.supplementCellTimeStamp(), + inputValue, columnPrefix.getValueConverter(), combinedAttributes); } /** - * @param columnPrefixBytes The byte representation for the column prefix. - * Should not contain {@link Separator#QUALIFIERS}. - * @param qualifier for the remainder of the column. - * @return fully sanitized column qualifier that is a combination of prefix - * and qualifier. If prefix is null, the result is simply the encoded - * qualifier without any separator. + * Sends a Mutation to the table. The mutations will be buffered and sent over + * the wire as part of a batch. + * + * @param rowKey identifying the row to write. Nothing gets written when null. + * @param tableMutator used to modify the underlying HBase table. Caller is + * responsible to pass a mutator for the table that actually has this + * column. + * @param qualifier column qualifier. Nothing gets written when null. + * @param timestamp version timestamp. When null the server timestamp will be + * used. + * @param attributes attributes for the mutation that are used by the + * coprocessor to set/read the cell tags. + * @param inputValue the value to write to the rowKey and column qualifier. + * Nothing gets written when null. + * @throws IOException if there is any exception encountered while doing + * store operation(sending mutation to the table). */ - public static byte[] getColumnQualifier(byte[] columnPrefixBytes, - long qualifier) { - - if (columnPrefixBytes == null) { - return Bytes.toBytes(qualifier); + public static void store(byte[] rowKey, TypedBufferedMutator tableMutator, + ColumnPrefix columnPrefix, String qualifier, Long timestamp, + Object inputValue, Attribute... attributes) throws IOException { + // Null check + if (qualifier == null) { + throw new IOException("Cannot store column with null qualifier in " + + tableMutator.getName().getNameAsString()); } - // Convert qualifier to lower case, strip of separators and tag on column - // prefix. - byte[] columnQualifier = - Separator.QUALIFIERS.join(columnPrefixBytes, Bytes.toBytes(qualifier)); - return columnQualifier; + byte[] columnQualifier = columnPrefix.getColumnPrefixBytes(qualifier); + Attribute[] combinedAttributes = + columnPrefix.getCombinedAttrsWithAggr(attributes); + + store(rowKey, tableMutator, columnPrefix.getColumnFamilyBytes(), + columnQualifier, timestamp, columnPrefix.supplementCellTimeStamp(), + inputValue, columnPrefix.getValueConverter(), combinedAttributes); } - - public ValueConverter getValueConverter() { - return converter; - } - - /** - * @param columnPrefixBytes The byte representation for the column prefix. - * Should not contain {@link Separator#QUALIFIERS}. - * @param qualifier the byte representation for the remainder of the column. - * @return fully sanitized column qualifier that is a combination of prefix - * and qualifier. If prefix is null, the result is simply the encoded - * qualifier without any separator. 
- */ - public static byte[] getColumnQualifier(byte[] columnPrefixBytes, - byte[] qualifier) { - - if (columnPrefixBytes == null) { - return qualifier; - } - - byte[] columnQualifier = - Separator.QUALIFIERS.join(columnPrefixBytes, qualifier); - return columnQualifier; - } - } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java new file mode 100644 index 0000000000..f4cd6fbb28 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java @@ -0,0 +1,121 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Arrays; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.Query; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * A bunch of utility functions used in HBase TimelineService backend. + */ +public final class HBaseTimelineStorageUtils { + private static final Logger LOG = + LoggerFactory.getLogger(HBaseTimelineStorageUtils.class); + + private HBaseTimelineStorageUtils() { + } + + + /** + * @param conf YARN configuration. Used to see if there is an explicit config + * pointing to the HBase config file to read. It should not be null + * or a NullPointerException will be thrown. + * @return a configuration with the HBase configuration from the classpath, + * optionally overwritten by the timeline service configuration URL if + * specified. + * @throws MalformedURLException if a timeline service HBase configuration URL + * is specified but is a malformed URL. 
+ */ + public static Configuration getTimelineServiceHBaseConf(Configuration conf) + throws MalformedURLException { + if (conf == null) { + throw new NullPointerException(); + } + + Configuration hbaseConf; + String timelineServiceHBaseConfFileURL = + conf.get(YarnConfiguration.TIMELINE_SERVICE_HBASE_CONFIGURATION_FILE); + if (timelineServiceHBaseConfFileURL != null + && timelineServiceHBaseConfFileURL.length() > 0) { + LOG.info("Using hbase configuration at " + + timelineServiceHBaseConfFileURL); + // create a clone so that we don't mess with out input one + hbaseConf = new Configuration(conf); + Configuration plainHBaseConf = new Configuration(false); + URL hbaseSiteXML = new URL(timelineServiceHBaseConfFileURL); + plainHBaseConf.addResource(hbaseSiteXML); + HBaseConfiguration.merge(hbaseConf, plainHBaseConf); + } else { + // default to what is on the classpath + hbaseConf = HBaseConfiguration.create(conf); + } + return hbaseConf; + } + + /** + * Given a row key prefix stored in a byte array, return a byte array for its + * immediate next row key. + * + * @param rowKeyPrefix The provided row key prefix, represented in an array. + * @return the closest next row key of the provided row key. + */ + public static byte[] calculateTheClosestNextRowKeyForPrefix( + byte[] rowKeyPrefix) { + // Essentially we are treating it like an 'unsigned very very long' and + // doing +1 manually. + // Search for the place where the trailing 0xFFs start + int offset = rowKeyPrefix.length; + while (offset > 0) { + if (rowKeyPrefix[offset - 1] != (byte) 0xFF) { + break; + } + offset--; + } + + if (offset == 0) { + // We got an 0xFFFF... (only FFs) stopRow value which is + // the last possible prefix before the end of the table. + // So set it to stop at the 'end of the table' + return HConstants.EMPTY_END_ROW; + } + + // Copy the right length of the original + byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset); + // And increment the last one + newStopRow[newStopRow.length - 1]++; + return newStopRow; + } + + public static void setMetricsTimeRange(Query query, byte[] metricsCf, + long tsBegin, long tsEnd) { + if (tsBegin != 0 || tsEnd != Long.MAX_VALUE) { + query.setColumnFamilyTimeRange(metricsCf, + tsBegin, ((tsEnd == Long.MAX_VALUE) ? 
Long.MAX_VALUE : (tsEnd + 1))); + } + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java similarity index 93% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java index cf469a5428..29a07e4d95 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java @@ -30,7 +30,7 @@ * * @param The class referring to the table to be written to. */ -class BufferedMutatorDelegator implements TypedBufferedMutator { +public class TypedBufferedMutator> { private final BufferedMutator bufferedMutator; @@ -38,7 +38,7 @@ class BufferedMutatorDelegator implements TypedBufferedMutator { * @param bufferedMutator the mutator to be wrapped for delegation. Shall not * be null. 
*/ - public BufferedMutatorDelegator(BufferedMutator bufferedMutator) { + public TypedBufferedMutator(BufferedMutator bufferedMutator) { this.bufferedMutator = bufferedMutator; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTableRW.java similarity index 65% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTableRW.java index 988bba2852..111ae711b3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTableRW.java @@ -26,49 +26,15 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * The entity table as column families info, config and metrics. Info stores - * information about a timeline entity object config stores configuration data - * of a timeline entity object metrics stores the metrics of a timeline entity - * object - * - * Example entity table record: - * - *
- * |-------------------------------------------------------------------------|
- * |  Row       | Column Family                | Column Family| Column Family|
- * |  key       | info                         | metrics      | config       |
- * |-------------------------------------------------------------------------|
- * | userName!  | id:entityId                  | metricId1:   | configKey1:  |
- * | clusterId! |                              | metricValue1 | configValue1 |
- * | flowName!  | type:entityType              | @timestamp1  |              |
- * | flowRunId! |                              |              | configKey2:  |
- * | AppId!     | created_time:                | metricId1:   | configValue2 |
- * | entityType!| 1392993084018                | metricValue2 |              |
- * | idPrefix!  |                              | @timestamp2  |              |
- * | entityId   | i!infoKey:                   |              |              |
- * |            | infoValue                    | metricId1:   |              |
- * |            |                              | metricValue1 |              |
- * |            | r!relatesToKey:              | @timestamp2  |              |
- * |            | id3=id4=id5                  |              |              |
- * |            |                              |              |              |
- * |            | s!isRelatedToKey             |              |              |
- * |            | id7=id9=id6                  |              |              |
- * |            |                              |              |              |
- * |            | e!eventId=timestamp=infoKey: |              |              |
- * |            | eventInfoValue               |              |              |
- * |            |                              |              |              |
- * |            | flowVersion:                 |              |              |
- * |            | versionValue                 |              |              |
- * |-------------------------------------------------------------------------|
- * 
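The e! cells in the layout above are produced by the event-handling path shown earlier in HBaseTimelineWriterImpl: the qualifier packs the event id, timestamp and info key through EventColumnName, and the value goes through the same static store helper. A sketch of a single event-info cell write; EntityColumnPrefix.EVENT and the EntityTable type parameter are assumptions consistent with this patch but not shown in this excerpt.

    import java.io.IOException;

    import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
    import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnPrefix;
    import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;

    final class EventCellWriteSketch {

      // Writes one "e!eventId=timestamp=infoKey: eventInfoValue" cell as in the
      // layout documented above. EntityColumnPrefix.EVENT is assumed to be the
      // prefix the writer passes for entity events.
      static void storeEventInfo(byte[] rowKey, TypedBufferedMutator<EntityTable> table,
          String eventId, Long eventTimestamp, String infoKey, Object infoValue)
          throws IOException {
        byte[] qualifier =
            new EventColumnName(eventId, eventTimestamp, infoKey).getColumnQualifier();
        ColumnRWHelper.store(rowKey, table, EntityColumnPrefix.EVENT, qualifier,
            null, infoValue);
      }
    }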
+ * Create, read and write to the Entity Table.
  */
-public class EntityTable extends BaseTable<EntityTable> {
+public class EntityTableRW extends BaseTableRW<EntityTableRW> {
   /** entity prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + "entity";
@@ -100,9 +66,9 @@ public class EntityTable extends BaseTable<EntityTable> {
   private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000;
   private static final Logger LOG =
-      LoggerFactory.getLogger(EntityTable.class);
+      LoggerFactory.getLogger(EntityTableRW.class);
-  public EntityTable() {
+  public EntityTableRW() {
     super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
   }
@@ -110,8 +76,8 @@ public EntityTable() {
    * (non-Javadoc)
    *
    * @see
-   * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
-   * (org.apache.hadoop.hbase.client.Admin,
+   * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+   * createTable(org.apache.hadoop.hbase.client.Admin,
    * org.apache.hadoop.conf.Configuration)
    */
   public void createTable(Admin admin, Configuration hbaseConf)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTableRW.java
similarity index 76%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTableRW.java
index e646eb26a5..5b9fe13a88 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTableRW.java
@@ -26,32 +26,14 @@
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 /**
- * The flow activity table has column family info
- * Stores the daily activity record for flows
- * Useful as a quick lookup of what flows were
- * running on a given day
- *
- * Example flow activity table record:
- *
- * <pre>
- * |-------------------------------------------|
- * |  Row key   | Column Family                |
- * |            | info                         |
- * |-------------------------------------------|
- * | clusterId! | r!runid1:version1            |
- * | inv Top of |                              |
- * | Day!       | r!runid2:version7            |
- * | userName!  |                              |
- * | flowName   |                              |
- * |-------------------------------------------|
- * </pre>
+ * Create, read and write to the FlowActivity Table.
  */
-public class FlowActivityTable extends BaseTable<FlowActivityTable> {
+public class FlowActivityTableRW extends BaseTableRW<FlowActivityTableRW> {
   /** flow activity table prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".flowactivity";
@@ -64,12 +46,12 @@ public class FlowActivityTable extends BaseTable<FlowActivityTable> {
       "timelineservice.flowactivity";
   private static final Logger LOG =
-      LoggerFactory.getLogger(FlowActivityTable.class);
+      LoggerFactory.getLogger(FlowActivityTableRW.class);
   /** default max number of versions. */
   public static final int DEFAULT_METRICS_MAX_VERSIONS = Integer.MAX_VALUE;
-  public FlowActivityTable() {
+  public FlowActivityTableRW() {
     super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
   }
@@ -77,8 +59,8 @@ public FlowActivityTable() {
    * (non-Javadoc)
    *
    * @see
-   * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
-   * (org.apache.hadoop.hbase.client.Admin,
+   * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+   * createTable(org.apache.hadoop.hbase.client.Admin,
    * org.apache.hadoop.conf.Configuration)
    */
   public void createTable(Admin admin, Configuration hbaseConf)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTableRW.java
similarity index 58%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTableRW.java
index a1d32ee78c..61c073432a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTableRW.java
@@ -26,66 +26,16 @@
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Coprocessor;
 /**
- * The flow run table has column family info
- * Stores per flow run information
- * aggregated across applications.
- *
- * Metrics are also stored in the info column family.
- *
- * Example flow run table record:
- *
- * <pre>
- * flow_run table
- * |-------------------------------------------|
- * |  Row key   | Column Family                |
- * |            | info                         |
- * |-------------------------------------------|
- * | clusterId! | flow_version:version7        |
- * | userName!  |                              |
- * | flowName!  | running_apps:1               |
- * | flowRunId  |                              |
- * |            | min_start_time:1392995080000 |
- * |            | #0:""                        |
- * |            |                              |
- * |            | min_start_time:1392995081012 |
- * |            | #0:appId2                    |
- * |            |                              |
- * |            | min_start_time:1392993083210 |
- * |            | #0:appId3                    |
- * |            |                              |
- * |            |                              |
- * |            | max_end_time:1392993084018   |
- * |            | #0:""                        |
- * |            |                              |
- * |            |                              |
- * |            | m!mapInputRecords:127        |
- * |            | #0:""                        |
- * |            |                              |
- * |            | m!mapInputRecords:31         |
- * |            | #2:appId2                    |
- * |            |                              |
- * |            | m!mapInputRecords:37         |
- * |            | #1:appId3                    |
- * |            |                              |
- * |            |                              |
- * |            | m!mapOutputRecords:181       |
- * |            | #0:""                        |
- * |            |                              |
- * |            | m!mapOutputRecords:37        |
- * |            | #1:appId3                    |
- * |            |                              |
- * |            |                              |
- * |-------------------------------------------|
- * </pre>
+ * Create, read and write to the FlowRun table.
  */
-public class FlowRunTable extends BaseTable<FlowRunTable> {
+public class FlowRunTableRW extends BaseTableRW<FlowRunTableRW> {
   /** entity prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".flowrun";
@@ -97,12 +47,12 @@ public class FlowRunTable extends BaseTable<FlowRunTable> {
   public static final String DEFAULT_TABLE_NAME = "timelineservice.flowrun";
   private static final Logger LOG =
-      LoggerFactory.getLogger(FlowRunTable.class);
+      LoggerFactory.getLogger(FlowRunTableRW.class);
   /** default max number of versions. */
   public static final int DEFAULT_METRICS_MAX_VERSIONS = Integer.MAX_VALUE;
-  public FlowRunTable() {
+  public FlowRunTableRW() {
     super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
   }
@@ -110,8 +60,8 @@ public FlowRunTable() {
    * (non-Javadoc)
    *
    * @see
-   * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
-   * (org.apache.hadoop.hbase.client.Admin,
+   * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+   * createTable(org.apache.hadoop.hbase.client.Admin,
    * org.apache.hadoop.conf.Configuration)
    */
   public void createTable(Admin admin, Configuration hbaseConf)
@@ -142,7 +92,8 @@ public void createTable(Admin admin, Configuration hbaseConf)
     Path coprocessorJarPath = new Path(coprocessorJarPathStr);
     LOG.info("CoprocessorJarPath=" + coprocessorJarPath.toString());
     flowRunTableDescp.addCoprocessor(
-        FlowRunCoprocessor.class.getCanonicalName(), coprocessorJarPath,
+        "org.apache.hadoop.yarn.server.timelineservice.storage." +
+            "flow.FlowRunCoprocessor", coprocessorJarPath,
         Coprocessor.PRIORITY_USER, null);
     admin.createTable(flowRunTableDescp);
     LOG.info("Status of table creation for " + table.getNameAsString() + "="
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
diff --git
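The FlowRunTableRW hunk above also changes how the flow run coprocessor is attached: rather than referencing FlowRunCoprocessor.class, which would pull the coprocessor (now hosted in the server-side submodule) onto the client submodule's compile-time classpath, the descriptor is handed the fully qualified class name as a string together with the coprocessor jar path. A minimal sketch of that registration pattern, assuming the HTableDescriptor and Admin APIs the createTable methods in this patch already use; the table name, column family and wrapper class below are placeholders for illustration only:

    import java.io.IOException;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.Coprocessor;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    // Illustrative sketch, not part of the patch.
    public final class CoprocessorByNameSketch {

      private CoprocessorByNameSketch() {
      }

      /** Creates a flow-run-like table and attaches the coprocessor by name. */
      public static void createTableWithCoprocessor(Admin admin,
          Path coprocessorJarPath) throws IOException {
        HTableDescriptor descriptor =
            new HTableDescriptor(TableName.valueOf("timelineservice.flowrun"));
        descriptor.addFamily(new HColumnDescriptor("i"));
        // Register by fully qualified class name: the class itself is loaded
        // by the region servers from the coprocessor jar, so no compile-time
        // dependency on the server-side submodule is needed here.
        descriptor.addCoprocessor(
            "org.apache.hadoop.yarn.server.timelineservice.storage."
                + "flow.FlowRunCoprocessor",
            coprocessorJarPath, Coprocessor.PRIORITY_USER, null);
        admin.createTable(descriptor);
      }
    }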
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java similarity index 91% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java index 5bacf66fb4..0956f1e567 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java @@ -26,7 +26,8 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext; import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey; -import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.webapp.NotFoundException; /** @@ -39,7 +40,7 @@ public abstract class AbstractTimelineStorageReader { /** * Used to look up the flow context. 
*/ - private final AppToFlowTable appToFlowTable = new AppToFlowTable(); + private final AppToFlowTableRW appToFlowTable = new AppToFlowTableRW(); public AbstractTimelineStorageReader(TimelineReaderContext ctxt) { context = ctxt; @@ -66,12 +67,12 @@ protected FlowContext lookupFlowContext(AppToFlowRowKey appToFlowRowKey, Get get = new Get(rowKey); Result result = appToFlowTable.getResult(hbaseConf, conn, get); if (result != null && !result.isEmpty()) { - Object flowName = - AppToFlowColumnPrefix.FLOW_NAME.readResult(result, clusterId); - Object flowRunId = - AppToFlowColumnPrefix.FLOW_RUN_ID.readResult(result, clusterId); - Object userId = - AppToFlowColumnPrefix.USER_ID.readResult(result, clusterId); + Object flowName = ColumnRWHelper.readResult( + result, AppToFlowColumnPrefix.FLOW_NAME, clusterId); + Object flowRunId = ColumnRWHelper.readResult( + result, AppToFlowColumnPrefix.FLOW_RUN_ID, clusterId); + Object userId = ColumnRWHelper.readResult( + result, AppToFlowColumnPrefix.USER_ID, clusterId); if (flowName == null || userId == null || flowRunId == null) { throw new NotFoundException( "Unable to find the context flow name, and flow run id, " diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java similarity index 97% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java index 0edd6a5268..7440316c9e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java @@ -49,8 +49,9 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix; import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils; @@ -63,23 +64,23 @@ * application table. */ class ApplicationEntityReader extends GenericEntityReader { - private static final ApplicationTable APPLICATION_TABLE = - new ApplicationTable(); + private static final ApplicationTableRW APPLICATION_TABLE = + new ApplicationTableRW(); - public ApplicationEntityReader(TimelineReaderContext ctxt, + ApplicationEntityReader(TimelineReaderContext ctxt, TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) { super(ctxt, entityFilters, toRetrieve); } - public ApplicationEntityReader(TimelineReaderContext ctxt, + ApplicationEntityReader(TimelineReaderContext ctxt, TimelineDataToRetrieve toRetrieve) { super(ctxt, toRetrieve); } /** - * Uses the {@link ApplicationTable}. + * Uses the {@link ApplicationTableRW}. */ - protected BaseTable getTable() { + protected BaseTableRW getTable() { return APPLICATION_TABLE; } @@ -430,12 +431,14 @@ protected TimelineEntity parseEntity(Result result) throws IOException { } TimelineEntity entity = new TimelineEntity(); entity.setType(TimelineEntityType.YARN_APPLICATION.toString()); - String entityId = ApplicationColumn.ID.readResult(result).toString(); + String entityId = + ColumnRWHelper.readResult(result, ApplicationColumn.ID).toString(); entity.setId(entityId); TimelineEntityFilters filters = getFilters(); // fetch created time - Long createdTime = (Long) ApplicationColumn.CREATED_TIME.readResult(result); + Long createdTime = (Long) ColumnRWHelper.readResult(result, + ApplicationColumn.CREATED_TIME); entity.setCreatedTime(createdTime); EnumSet fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java similarity index 98% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java index 3f723342a4..ebe21a4763 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java @@ -33,7 +33,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -51,7 +51,7 @@ public final class EntityTypeReader extends AbstractTimelineStorageReader { private static final Logger LOG = LoggerFactory.getLogger(EntityTypeReader.class); - private static final EntityTable ENTITY_TABLE = new EntityTable(); + private static final EntityTableRW ENTITY_TABLE = new EntityTableRW(); public EntityTypeReader(TimelineReaderContext context) { super(context); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java similarity index 92% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java index a1cdb29b12..d0a0f3bb46 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java @@ -35,13 +35,14 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters; import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext; import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderUtils; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongKeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTableRW; import org.apache.hadoop.yarn.webapp.BadRequestException; import com.google.common.base.Preconditions; @@ -51,8 +52,8 @@ * flow activity table. */ class FlowActivityEntityReader extends TimelineEntityReader { - private static final FlowActivityTable FLOW_ACTIVITY_TABLE = - new FlowActivityTable(); + private static final FlowActivityTableRW FLOW_ACTIVITY_TABLE = + new FlowActivityTableRW(); /** * Used to convert Long key components to and from storage format. 
@@ -60,21 +61,21 @@ class FlowActivityEntityReader extends TimelineEntityReader { private final KeyConverter longKeyConverter = new LongKeyConverter(); - public FlowActivityEntityReader(TimelineReaderContext ctxt, + FlowActivityEntityReader(TimelineReaderContext ctxt, TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) { super(ctxt, entityFilters, toRetrieve); } - public FlowActivityEntityReader(TimelineReaderContext ctxt, + FlowActivityEntityReader(TimelineReaderContext ctxt, TimelineDataToRetrieve toRetrieve) { super(ctxt, toRetrieve); } /** - * Uses the {@link FlowActivityTable}. + * Uses the {@link FlowActivityTableRW}. */ @Override - protected BaseTable getTable() { + protected BaseTableRW getTable() { return FLOW_ACTIVITY_TABLE; } @@ -164,8 +165,8 @@ protected TimelineEntity parseEntity(Result result) throws IOException { flowActivity.setId(flowActivity.getId()); // get the list of run ids along with the version that are associated with // this flow on this day - Map runIdsMap = - FlowActivityColumnPrefix.RUN_ID.readResults(result, longKeyConverter); + Map runIdsMap = ColumnRWHelper.readResults(result, + FlowActivityColumnPrefix.RUN_ID, longKeyConverter); for (Map.Entry e : runIdsMap.entrySet()) { Long runId = e.getKey(); String version = (String)e.getValue(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java similarity index 94% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java index af043b3bc7..33a2cf67a2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java @@ -43,7 +43,8 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumn; @@ -51,7 +52,7 @@ import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW; import org.apache.hadoop.yarn.webapp.BadRequestException; import com.google.common.base.Preconditions; @@ -61,23 +62,23 @@ * table. */ class FlowRunEntityReader extends TimelineEntityReader { - private static final FlowRunTable FLOW_RUN_TABLE = new FlowRunTable(); + private static final FlowRunTableRW FLOW_RUN_TABLE = new FlowRunTableRW(); - public FlowRunEntityReader(TimelineReaderContext ctxt, + FlowRunEntityReader(TimelineReaderContext ctxt, TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) { super(ctxt, entityFilters, toRetrieve); } - public FlowRunEntityReader(TimelineReaderContext ctxt, + FlowRunEntityReader(TimelineReaderContext ctxt, TimelineDataToRetrieve toRetrieve) { super(ctxt, toRetrieve); } /** - * Uses the {@link FlowRunTable}. + * Uses the {@link FlowRunTableRW}. */ @Override - protected BaseTable getTable() { + protected BaseTableRW getTable() { return FLOW_RUN_TABLE; } @@ -261,19 +262,22 @@ protected TimelineEntity parseEntity(Result result) throws IOException { flowRun.setName(rowKey.getFlowName()); // read the start time - Long startTime = (Long) FlowRunColumn.MIN_START_TIME.readResult(result); + Long startTime = (Long) ColumnRWHelper.readResult(result, + FlowRunColumn.MIN_START_TIME); if (startTime != null) { flowRun.setStartTime(startTime.longValue()); } // read the end time if available - Long endTime = (Long) FlowRunColumn.MAX_END_TIME.readResult(result); + Long endTime = (Long) ColumnRWHelper.readResult(result, + FlowRunColumn.MAX_END_TIME); if (endTime != null) { flowRun.setMaxEndTime(endTime.longValue()); } // read the flow version - String version = (String) FlowRunColumn.FLOW_VERSION.readResult(result); + String version = (String) ColumnRWHelper.readResult(result, + FlowRunColumn.FLOW_VERSION); if (version != null) { flowRun.setVersion(version); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java similarity index 97% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java index 3a444455bd..02eca84f1f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java @@ -46,7 +46,9 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix; @@ -57,7 +59,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW; import org.apache.hadoop.yarn.webapp.BadRequestException; import com.google.common.base.Preconditions; @@ -67,7 +69,7 @@ * table. */ class GenericEntityReader extends TimelineEntityReader { - private static final EntityTable ENTITY_TABLE = new EntityTable(); + private static final EntityTableRW ENTITY_TABLE = new EntityTableRW(); /** * Used to convert strings key components to and from storage format. @@ -75,20 +77,20 @@ class GenericEntityReader extends TimelineEntityReader { private final KeyConverter stringKeyConverter = new StringKeyConverter(); - public GenericEntityReader(TimelineReaderContext ctxt, + GenericEntityReader(TimelineReaderContext ctxt, TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) { super(ctxt, entityFilters, toRetrieve); } - public GenericEntityReader(TimelineReaderContext ctxt, + GenericEntityReader(TimelineReaderContext ctxt, TimelineDataToRetrieve toRetrieve) { super(ctxt, toRetrieve); } /** - * Uses the {@link EntityTable}. + * Uses the {@link EntityTableRW}. */ - protected BaseTable getTable() { + protected BaseTableRW getTable() { return ENTITY_TABLE; } @@ -543,7 +545,8 @@ protected TimelineEntity parseEntity(Result result) throws IOException { TimelineEntityFilters filters = getFilters(); // fetch created time - Long createdTime = (Long) EntityColumn.CREATED_TIME.readResult(result); + Long createdTime = (Long) ColumnRWHelper.readResult(result, + EntityColumn.CREATED_TIME); entity.setCreatedTime(createdTime); EnumSet fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve(); @@ -635,11 +638,12 @@ protected TimelineEntity parseEntity(Result result) throws IOException { * @param isConfig if true, means we are reading configs, otherwise info. * @throws IOException if any problem is encountered while reading result. 
*/ - protected void readKeyValuePairs(TimelineEntity entity, Result result, + protected > void readKeyValuePairs( + TimelineEntity entity, Result result, ColumnPrefix prefix, boolean isConfig) throws IOException { // info and configuration are of type Map Map columns = - prefix.readResults(result, stringKeyConverter); + ColumnRWHelper.readResults(result, prefix, stringKeyConverter); if (isConfig) { for (Map.Entry column : columns.entrySet()) { entity.addConfig(column.getKey(), column.getValue().toString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java similarity index 98% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java index e780dccd3a..faed34857d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java @@ -42,7 +42,8 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList; import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils; @@ -51,14 +52,14 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKey; import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKeyPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW; import org.apache.hadoop.yarn.webapp.BadRequestException; import com.google.common.base.Preconditions; class SubApplicationEntityReader extends GenericEntityReader { - private static final SubApplicationTable SUB_APPLICATION_TABLE = - new SubApplicationTable(); + private static 
final SubApplicationTableRW SUB_APPLICATION_TABLE = + new SubApplicationTableRW(); SubApplicationEntityReader(TimelineReaderContext ctxt, TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) { @@ -71,9 +72,9 @@ class SubApplicationEntityReader extends GenericEntityReader { } /** - * Uses the {@link SubApplicationTable}. + * Uses the {@link SubApplicationTableRW}. */ - protected BaseTable getTable() { + protected BaseTableRW getTable() { return SUB_APPLICATION_TABLE; } @@ -403,8 +404,8 @@ protected TimelineEntity parseEntity(Result result) throws IOException { TimelineEntityFilters filters = getFilters(); // fetch created time - Long createdTime = - (Long) SubApplicationColumn.CREATED_TIME.readResult(result); + Long createdTime = (Long) ColumnRWHelper.readResult(result, + SubApplicationColumn.CREATED_TIME); entity.setCreatedTime(createdTime); EnumSet fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java similarity index 93% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java index 07e8423425..3168163ed9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java @@ -44,7 +44,9 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName; import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; @@ -72,7 +74,7 @@ public abstract class TimelineEntityReader extends /** * Main table the entity reader uses. */ - private BaseTable table; + private BaseTableRW table; /** * Used to convert strings key components to and from storage format. @@ -261,7 +263,7 @@ public Set readEntities(Configuration hbaseConf, * * @return A reference to the table. 
*/ - protected BaseTable getTable() { + protected BaseTableRW getTable() { return table; } @@ -314,8 +316,8 @@ protected abstract TimelineEntity parseEntity(Result result) protected void readMetrics(TimelineEntity entity, Result result, ColumnPrefix columnPrefix) throws IOException { NavigableMap> metricsResult = - columnPrefix.readResultsWithTimestamps( - result, stringKeyConverter); + ColumnRWHelper.readResultsWithTimestamps( + result, columnPrefix, stringKeyConverter); for (Map.Entry> metricResult: metricsResult.entrySet()) { TimelineMetric metric = new TimelineMetric(); @@ -340,7 +342,7 @@ public boolean isSingleEntityRead() { return singleEntityRead; } - protected void setTable(BaseTable baseTable) { + protected void setTable(BaseTableRW baseTable) { this.table = baseTable; } @@ -367,8 +369,9 @@ protected boolean hasField(EnumSet fieldsToRetrieve, * @param columns set of column qualifiers. * @return filter list. */ - protected FilterList createFiltersFromColumnQualifiers( - ColumnPrefix colPrefix, Set columns) { + protected > FilterList + createFiltersFromColumnQualifiers( + ColumnPrefix colPrefix, Set columns) { FilterList list = new FilterList(Operator.MUST_PASS_ONE); for (String column : columns) { // For columns which have compound column qualifiers (eg. events), we need @@ -381,8 +384,8 @@ protected FilterList createFiltersFromColumnQualifiers( return list; } - protected byte[] createColQualifierPrefix(ColumnPrefix colPrefix, - String column) { + protected > byte[] createColQualifierPrefix( + ColumnPrefix colPrefix, String column) { if (colPrefix == ApplicationColumnPrefix.EVENT || colPrefix == EntityColumnPrefix.EVENT) { return new EventColumnName(column, null, null).getColumnQualifier(); @@ -402,11 +405,12 @@ protected byte[] createColQualifierPrefix(ColumnPrefix colPrefix, * isRelatedTo, otherwise its added to relatesTo. * @throws IOException if any problem is encountered while reading result. */ - protected void readRelationship(TimelineEntity entity, Result result, + protected > void readRelationship( + TimelineEntity entity, Result result, ColumnPrefix prefix, boolean isRelatedTo) throws IOException { // isRelatedTo and relatesTo are of type Map> - Map columns = - prefix.readResults(result, stringKeyConverter); + Map columns = ColumnRWHelper.readResults( + result, prefix, stringKeyConverter); for (Map.Entry column : columns.entrySet()) { for (String id : Separator.VALUES.splitEncoded(column.getValue() .toString())) { @@ -430,11 +434,12 @@ protected void readRelationship(TimelineEntity entity, Result result, * @param prefix column prefix. * @throws IOException if any problem is encountered while reading result. 
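The reader hunks above all make the same substitution: a column enum such as ApplicationColumn or FlowRunColumn no longer reads its own value from a Result, and prefix-based readResults and readResultsWithTimestamps calls move off the enums as well; the reader hands the constant to the static helpers on ColumnRWHelper. A small sketch of the new read calls, using only call shapes visible in the hunks above; the wrapper class, method names and return types are assumptions for illustration:

    import java.io.IOException;
    import java.util.Map;

    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongKeyConverter;
    import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityColumnPrefix;

    // Illustrative sketch, not part of the patch.
    public final class ColumnRWHelperReadSketch {

      private ColumnRWHelperReadSketch() {
      }

      /** Single column: the enum only describes the column, ColumnRWHelper does the I/O. */
      public static Long readCreatedTime(Result result) throws IOException {
        // Previously: ApplicationColumn.CREATED_TIME.readResult(result)
        return (Long) ColumnRWHelper.readResult(result,
            ApplicationColumn.CREATED_TIME);
      }

      /** Prefixed columns: all qualifiers under RUN_ID, decoded with a key converter. */
      public static Map<Long, Object> readRunIds(Result result)
          throws IOException {
        return ColumnRWHelper.readResults(result,
            FlowActivityColumnPrefix.RUN_ID, new LongKeyConverter());
      }
    }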
*/ - protected static void readEvents(TimelineEntity entity, Result result, + protected static > void readEvents( + TimelineEntity entity, Result result, ColumnPrefix prefix) throws IOException { Map eventsMap = new HashMap<>(); - Map eventsResult = - prefix.readResults(result, new EventColumnNameConverter()); + Map eventsResult = ColumnRWHelper.readResults( + result, prefix, new EventColumnNameConverter()); for (Map.Entry eventResult : eventsResult.entrySet()) { EventColumnName eventColumnName = eventResult.getKey(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTableRW.java similarity index 64% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTableRW.java index 785a243d93..256b24b72e 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTableRW.java @@ -26,52 +26,15 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * The sub application table has column families: - * info, config and metrics. - * Info stores information about a timeline entity object - * config stores configuration data of a timeline entity object - * metrics stores the metrics of a timeline entity object - * - * Example sub application table record: - * - *
- * |-------------------------------------------------------------------------|
- * |  Row          | Column Family             | Column Family| Column Family|
- * |  key          | info                      | metrics      | config       |
- * |-------------------------------------------------------------------------|
- * | subAppUserId! | id:entityId               | metricId1:   | configKey1:  |
- * | clusterId!    | type:entityType           | metricValue1 | configValue1 |
- * | entityType!   |                           | @timestamp1  |              |
- * | idPrefix!|    |                           |              | configKey2:  |
- * | entityId!     | created_time:             | metricId1:   | configValue2 |
- * | userId        | 1392993084018             | metricValue2 |              |
- * |               |                           | @timestamp2  |              |
- * |               | i!infoKey:                |              |              |
- * |               | infoValue                 | metricId1:   |              |
- * |               |                           | metricValue1 |              |
- * |               |                           | @timestamp2  |              |
- * |               | e!eventId=timestamp=      |              |              |
- * |               | infoKey:                  |              |              |
- * |               | eventInfoValue            |              |              |
- * |               |                           |              |              |
- * |               | r!relatesToKey:           |              |              |
- * |               | id3=id4=id5               |              |              |
- * |               |                           |              |              |
- * |               | s!isRelatedToKey          |              |              |
- * |               | id7=id9=id6               |              |              |
- * |               |                           |              |              |
- * |               | flowVersion:              |              |              |
- * |               | versionValue              |              |              |
- * |-------------------------------------------------------------------------|
- * </pre>
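Taken together, the EntityTableRW, FlowActivityTableRW, FlowRunTableRW and SubApplicationTableRW hunks above leave the client submodule with small classes that own table creation and lookups, while the schema constants stay behind in the table definitions. A short sketch of how a caller is expected to use one of these classes, assuming only the createTable(Admin, Configuration) and getResult(Configuration, Connection, Get) signatures that appear in the hunks; the wrapper class and connection handling are illustrative:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTableRW;

    // Illustrative sketch, not part of the patch.
    public final class TableRWUsageSketch {

      private TableRWUsageSketch() {
      }

      /** One-time schema setup, normally driven by the schema creation tool. */
      public static void createAppToFlowTable(Admin admin,
          Configuration hbaseConf) throws IOException {
        new AppToFlowTableRW().createTable(admin, hbaseConf);
      }

      /** Point lookup of one app-to-flow row through the RW table object. */
      public static Result lookup(Configuration hbaseConf, Connection conn,
          byte[] rowKey) throws IOException {
        AppToFlowTableRW appToFlowTable = new AppToFlowTableRW();
        // The RW object resolves the configured table name internally.
        return appToFlowTable.getResult(hbaseConf, conn, new Get(rowKey));
      }
    }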
+ * Create, read and write to the SubApplication table. */ -public class SubApplicationTable extends BaseTable { +public class SubApplicationTableRW extends BaseTableRW { /** sub app prefix. */ private static final String PREFIX = YarnConfiguration.TIMELINE_SERVICE_PREFIX + "subapplication"; @@ -104,9 +67,9 @@ public class SubApplicationTable extends BaseTable { private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000; private static final Logger LOG = LoggerFactory.getLogger( - SubApplicationTable.class); + SubApplicationTableRW.class); - public SubApplicationTable() { + public SubApplicationTableRW() { super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME); } @@ -114,8 +77,8 @@ public SubApplicationTable() { * (non-Javadoc) * * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable - * (org.apache.hadoop.hbase.client.Admin, + * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW# + * createTable(org.apache.hadoop.hbase.client.Admin, * org.apache.hadoop.conf.Configuration) */ public void createTable(Admin admin, Configuration hbaseConf) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/pom.xml new file mode 100644 index 0000000000..cb0d6e2225 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/pom.xml @@ -0,0 +1,132 @@ + + + + + 
hadoop-yarn-server-timelineservice-hbase + org.apache.hadoop + 3.2.0-SNAPSHOT + + 4.0.0 + + hadoop-yarn-server-timelineservice-hbase-common + Apache Hadoop YARN TimelineService HBase Common + 3.2.0-SNAPSHOT + + + + ${project.parent.parent.parent.basedir} + + + + + org.apache.hadoop + hadoop-annotations + provided + + + + org.apache.hadoop + hadoop-yarn-api + provided + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice + + + + + org.apache.hadoop + hadoop-yarn-server-applicationhistoryservice + provided + + + + + org.apache.hadoop + hadoop-common + test-jar + test + + + + org.apache.hbase + hbase-common + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.mortbay.jetty + jetty-util + + + + + + com.google.guava + guava + test + + + + junit + junit + test + + + + + + + maven-jar-plugin + + + + test-jar + + test-compile + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + package + + copy-dependencies + + + runtime + org.slf4j,org.apache.hadoop,com.github.stephenc.findbugs + ${project.build.directory}/lib + + + + + + + \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java similarity index 78% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java index 00eaa7eb3f..c3d6a5255b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java @@ -17,17 +17,12 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.application; -import java.io.IOException; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; @@ -52,10 +47,10 @@ public enum ApplicationColumn implements Column { */ 
FLOW_VERSION(ApplicationColumnFamily.INFO, "flow_version"); - private final ColumnHelper column; private final ColumnFamily columnFamily; private final String columnQualifier; private final byte[] columnQualifierBytes; + private final ValueConverter valueConverter; private ApplicationColumn(ColumnFamily columnFamily, String columnQualifier) { @@ -69,7 +64,7 @@ private ApplicationColumn(ColumnFamily columnFamily, // Future-proof by ensuring the right column prefix hygiene. this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE.encode(columnQualifier)); - this.column = new ColumnHelper(columnFamily, converter); + this.valueConverter = converter; } /** @@ -79,17 +74,6 @@ private String getColumnQualifier() { return columnQualifier; } - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, Long timestamp, - Object inputValue, Attribute... attributes) throws IOException { - column.store(rowKey, tableMutator, columnQualifierBytes, timestamp, - inputValue, attributes); - } - - public Object readResult(Result result) throws IOException { - return column.readResult(result, columnQualifierBytes); - } - @Override public byte[] getColumnQualifierBytes() { return columnQualifierBytes.clone(); @@ -102,7 +86,16 @@ public byte[] getColumnFamilyBytes() { @Override public ValueConverter getValueConverter() { - return column.getValueConverter(); + return valueConverter; } + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return attributes; + } + + @Override + public boolean supplementCellTimestamp() { + return false; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnFamily.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnFamily.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnFamily.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java similarity index 55% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java index 8297dc5b76..89412f450a 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java @@ -17,20 +17,13 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.application; -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; @@ -69,7 +62,6 @@ public enum ApplicationColumnPrefix implements ColumnPrefix { */ METRIC(ApplicationColumnFamily.METRICS, null, new LongConverter()); - private final ColumnHelper column; private final ColumnFamily columnFamily; /** @@ -78,6 +70,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix { */ private final String columnPrefix; private final byte[] columnPrefixBytes; + private final ValueConverter valueConverter; /** * Private constructor, meant to be used by the enum definition. @@ -100,7 +93,7 @@ private ApplicationColumnPrefix(ColumnFamily columnFamily, */ private ApplicationColumnPrefix(ColumnFamily columnFamily, String columnPrefix, ValueConverter converter) { - column = new ColumnHelper(columnFamily, converter); + this.valueConverter = converter; this.columnFamily = columnFamily; this.columnPrefix = columnPrefix; if (columnPrefix == null) { @@ -136,101 +129,22 @@ public byte[] getColumnFamilyBytes() { return columnFamily.getBytes(); } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, byte[] qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); + @Override + public byte[] getColumnPrefixInBytes() { + return columnPrefixBytes != null ? 
columnPrefixBytes.clone() : null; } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, String qualifier, - Long timestamp, Object inputValue, Attribute...attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return attributes; } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResult(org.apache.hadoop.hbase.client.Result, java.lang.String) - */ - public Object readResult(Result result, String qualifier) throws IOException { - byte[] columnQualifier = - ColumnHelper.getColumnQualifier(this.columnPrefixBytes, qualifier); - return column.readResult(result, columnQualifier); + @Override + public boolean supplementCellTimeStamp() { + return false; } public ValueConverter getValueConverter() { - return column.getValueConverter(); + return valueConverter; } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResults(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public Map readResults(Result result, - KeyConverter keyConverter) throws IOException { - return column.readResults(result, columnPrefixBytes, keyConverter); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public NavigableMap> - readResultsWithTimestamps(Result result, KeyConverter keyConverter) - throws IOException { - return column.readResultsWithTimestamps(result, columnPrefixBytes, - keyConverter); - } - } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java diff --git 
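With its store() and readResult() methods removed, ApplicationColumnPrefix is now purely descriptive: it only exposes the qualifier prefix, column family and value converter, while the actual reads and writes are performed by separate helper code rather than by the enum itself. A minimal sketch of how a caller might use what remains, assuming the getColumnPrefixBytes(String) method declared on the ColumnPrefix interface (and already present on this enum) and a hypothetical metric id; note that METRIC is declared with a null prefix, so the result is simply the encoded metric id:

    // Hypothetical illustration; no I/O happens here, the enum only describes the column.
    byte[] qualifier =
        ApplicationColumnPrefix.METRIC.getColumnPrefixBytes("metricId1");
    byte[] family = ApplicationColumnPrefix.METRIC.getColumnFamilyBytes();
    ValueConverter converter = ApplicationColumnPrefix.METRIC.getValueConverter();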
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKeyPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKeyPrefix.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKeyPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKeyPrefix.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java new file mode 100644 index 0000000000..16ab5fada0 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java @@ -0,0 +1,60 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.application; + +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; + +/** + * The application table as column families info, config and metrics. Info + * stores information about a YARN application entity, config stores + * configuration data of a YARN application, metrics stores the metrics of a + * YARN application. This table is entirely analogous to the entity table but + * created for better performance. + * + * Example application table record: + * + *
+ * |-------------------------------------------------------------------------|
+ * |  Row       | Column Family                | Column Family| Column Family|
+ * |  key       | info                         | metrics      | config       |
+ * |-------------------------------------------------------------------------|
+ * | clusterId! | id:appId                     | metricId1:   | configKey1:  |
+ * | userName!  |                              | metricValue1 | configValue1 |
+ * | flowName!  | created_time:                | @timestamp1  |              |
+ * | flowRunId! | 1392993084018                |              | configKey2:  |
+ * | AppId      |                              | metricId1:   | configValue2 |
+ * |            | i!infoKey:                   | metricValue2 |              |
+ * |            | infoValue                    | @timestamp2  |              |
+ * |            |                              |              |              |
+ * |            | r!relatesToKey:              | metricId2:   |              |
+ * |            | id3=id4=id5                  | metricValue1 |              |
+ * |            |                              | @timestamp2  |              |
+ * |            | s!isRelatedToKey:            |              |              |
+ * |            | id7=id9=id6                  |              |              |
+ * |            |                              |              |              |
+ * |            | e!eventId=timestamp=infoKey: |              |              |
+ * |            | eventInfoValue               |              |              |
+ * |            |                              |              |              |
+ * |            | flowVersion:                 |              |              |
+ * |            | versionValue                 |              |              |
+ * |-------------------------------------------------------------------------|
+ * 
+ */ +public final class ApplicationTable extends BaseTable { +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java new file mode 100644 index 0000000000..03f508f131 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.application + * contains classes related to implementation for application table. 
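The ApplicationRowKey class referenced by this layout moves into the new common module unchanged (the 100% rename above), so the clusterId!userName!flowName!flowRunId!AppId key in the diagram is still produced the same way. A minimal sketch with hypothetical values, assuming the constructor takes the five components in the order the diagram shows and that getRowKey() returns the encoded bytes:

    // Hypothetical illustration of building the row key drawn above.
    byte[] rowKey = new ApplicationRowKey(
        "cluster1", "user1", "flow_name", 1002345678919L,
        "application_1111111111111_0001").getRowKey();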
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.application; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java similarity index 78% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java index 67497fc8c9..0065f07764 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java @@ -18,18 +18,14 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow; -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; -import java.io.IOException; - /** * Identifies fully qualified columns for the {@link AppToFlowTable}. */ @@ -50,10 +46,10 @@ public enum AppToFlowColumn implements Column { */ USER_ID(AppToFlowColumnFamily.MAPPING, "user_id"); - private final ColumnHelper column; private final ColumnFamily columnFamily; private final String columnQualifier; private final byte[] columnQualifierBytes; + private final ValueConverter valueConverter; AppToFlowColumn(ColumnFamily columnFamily, String columnQualifier) { @@ -62,7 +58,7 @@ public enum AppToFlowColumn implements Column { // Future-proof by ensuring the right column prefix hygiene. this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE.encode(columnQualifier)); - this.column = new ColumnHelper(columnFamily); + this.valueConverter = GenericConverter.getInstance(); } /** @@ -77,13 +73,6 @@ public byte[] getColumnQualifierBytes() { return columnQualifierBytes.clone(); } - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, Long timestamp, - Object inputValue, Attribute... 
attributes) throws IOException { - column.store(rowKey, tableMutator, columnQualifierBytes, timestamp, - inputValue, attributes); - } - @Override public byte[] getColumnFamilyBytes() { return columnFamily.getBytes(); @@ -91,11 +80,16 @@ public byte[] getColumnFamilyBytes() { @Override public ValueConverter getValueConverter() { - return column.getValueConverter(); + return valueConverter; } - public Object readResult(Result result) throws IOException { - return column.readResult(result, columnQualifierBytes); + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return attributes; } + @Override + public boolean supplementCellTimestamp() { + return false; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java new file mode 100644 index 0000000000..9540129899 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java @@ -0,0 +1,105 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow; + +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; + +/** + * Identifies partially qualified columns for the app-to-flow table. + */ +public enum AppToFlowColumnPrefix implements ColumnPrefix { + + /** + * The flow name. + */ + FLOW_NAME(AppToFlowColumnFamily.MAPPING, "flow_name"), + + /** + * The flow run ID. + */ + FLOW_RUN_ID(AppToFlowColumnFamily.MAPPING, "flow_run_id"), + + /** + * The user. + */ + USER_ID(AppToFlowColumnFamily.MAPPING, "user_id"); + + private final ColumnFamily columnFamily; + private final String columnPrefix; + private final byte[] columnPrefixBytes; + private final ValueConverter valueConverter; + + AppToFlowColumnPrefix(ColumnFamily columnFamily, + String columnPrefix) { + this.columnFamily = columnFamily; + this.columnPrefix = columnPrefix; + if (columnPrefix == null) { + this.columnPrefixBytes = null; + } else { + // Future-proof by ensuring the right column prefix hygiene. + this.columnPrefixBytes = + Bytes.toBytes(Separator.SPACE.encode(columnPrefix)); + } + this.valueConverter = GenericConverter.getInstance(); + } + + @Override + public byte[] getColumnPrefixBytes(String qualifierPrefix) { + return ColumnHelper.getColumnQualifier( + columnPrefixBytes, qualifierPrefix); + } + + @Override + public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) { + return ColumnHelper.getColumnQualifier( + columnPrefixBytes, qualifierPrefix); + } + + @Override + public byte[] getColumnPrefixInBytes() { + return columnPrefixBytes != null ? columnPrefixBytes.clone() : null; + } + + @Override + public byte[] getColumnFamilyBytes() { + return columnFamily.getBytes(); + } + + @Override + public ValueConverter getValueConverter() { + return valueConverter; + } + + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... 
attributes) { + return attributes; + } + + @Override + public boolean supplementCellTimeStamp() { + return false; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java new file mode 100644 index 0000000000..e184288db2 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java @@ -0,0 +1,60 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow; + +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; + +/** + * The app_flow table as column families mapping. Mapping stores + * appId to flowName and flowRunId mapping information + * + * Example app_flow table record: + * + *
+ * |--------------------------------------|
+ * |  Row       | Column Family           |
+ * |  key       | mapping                 |
+ * |--------------------------------------|
+ * | appId      | flow_name!cluster1:     |
+ * |            | foo@daily_hive_report   |
+ * |            |                         |
+ * |            | flow_run_id!cluster1:   |
+ * |            | 1452828720457           |
+ * |            |                         |
+ * |            | user_id!cluster1:       |
+ * |            | admin                   |
+ * |            |                         |
+ * |            | flow_name!cluster2:     |
+ * |            | bar@ad_hoc_query        |
+ * |            |                         |
+ * |            | flow_run_id!cluster2:   |
+ * |            | 1452828498752           |
+ * |            |                         |
+ * |            | user_id!cluster2:       |
+ * |            | joe                     |
+ * |            |                         |
+ * |--------------------------------------|
+ * 
+ * + * It is possible (although unlikely) in a multi-cluster environment that there + * may be more than one applications for a given app id. Different clusters are + * recorded as different sets of columns. + */ +public final class AppToFlowTable extends BaseTable { +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java new file mode 100644 index 0000000000..f01d982604 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow + * contains classes related to implementation for app to flow table. 
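Since each cluster shows up as its own set of columns, resolving the flow context for an app id means reading the column whose name is the prefix joined with that cluster id, e.g. flow_name!cluster1 in the diagram above. A minimal sketch, assuming the AppToFlowColumnPrefix enum introduced in this patch and a hypothetical cluster id:

    // Hypothetical illustration: qualifier bytes for "flow_name!cluster1",
    // i.e. the flow name recorded for this app id on cluster1.
    byte[] flowNameQualifier =
        AppToFlowColumnPrefix.FLOW_NAME.getColumnPrefixBytes("cluster1");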
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java similarity index 95% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java index 51604f012c..d934f7440d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java @@ -55,7 +55,7 @@ public byte[] encode(String appIdStr) { LongConverter.invertLong(appId.getClusterTimestamp())); System.arraycopy(clusterTs, 0, appIdBytes, 0, Bytes.SIZEOF_LONG); byte[] seqId = Bytes.toBytes( - HBaseTimelineStorageUtils.invertInt(appId.getId())); + HBaseTimelineSchemaUtils.invertInt(appId.getId())); System.arraycopy(seqId, 0, appIdBytes, Bytes.SIZEOF_LONG, Bytes.SIZEOF_INT); return appIdBytes; } @@ -80,9 +80,9 @@ public String decode(byte[] appIdBytes) { } long clusterTs = LongConverter.invertLong( Bytes.toLong(appIdBytes, 0, Bytes.SIZEOF_LONG)); - int seqId = HBaseTimelineStorageUtils.invertInt( + int seqId = HBaseTimelineSchemaUtils.invertInt( Bytes.toInt(appIdBytes, Bytes.SIZEOF_LONG, Bytes.SIZEOF_INT)); - return HBaseTimelineStorageUtils.convertApplicationIdToString( + return HBaseTimelineSchemaUtils.convertApplicationIdToString( ApplicationId.newInstance(clusterTs, seqId)); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java similarity index 73% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java index 64a11f8f24..433b352016 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java @@ -15,14 +15,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.hadoop.yarn.server.timelineservice.storage.common; -import org.apache.hadoop.hbase.client.BufferedMutator; /** - * Just a typed wrapper around {@link BufferedMutator} used to ensure that - * columns can write only to the table mutator for the right table. + * The base type of tables. + * @param T table type */ -public interface TypedBufferedMutator extends BufferedMutator { - // This class is intentionally left (almost) blank +public abstract class BaseTable { } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java similarity index 50% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java index 90f2de4ef3..2b50252c71 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java @@ -17,49 +17,13 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.common; -import java.io.IOException; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; /** * A Column represents the way to store a fully qualified column in a specific * table. */ -public interface Column { - - /** - * Sends a Mutation to the table. The mutations will be buffered and sent over - * the wire as part of a batch. - * - * @param rowKey identifying the row to write. Nothing gets written when null. - * @param tableMutator used to modify the underlying HBase table. Caller is - * responsible to pass a mutator for the table that actually has this - * column. - * @param timestamp version timestamp. When null the server timestamp will be - * used. - * @param attributes Map of attributes for this mutation. used in the - * coprocessor to set/read the cell tags. Can be null. - * @param inputValue the value to write to the rowKey and column qualifier. - * Nothing gets written when null. - * @throws IOException if there is any exception encountered during store. 
- */ - void store(byte[] rowKey, TypedBufferedMutator tableMutator, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException; - - /** - * Get the latest version of this specified column. Note: this call clones the - * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}. - * - * @param result Cannot be null - * @return result object (can be cast to whatever object was written to), or - * null when result doesn't contain this column. - * @throws IOException if there is any exception encountered while reading - * result. - */ - Object readResult(Result result) throws IOException; - +public interface Column> { /** * Returns column family name(as bytes) associated with this column. * @return a byte array encoding column family for this column qualifier. @@ -77,4 +41,16 @@ void store(byte[] rowKey, TypedBufferedMutator tableMutator, * @return a {@link ValueConverter} implementation. */ ValueConverter getValueConverter(); + + /** + * Return attributed combined with aggregations, if any. + * @return an array of Attributes + */ + Attribute[] getCombinedAttrsWithAggr(Attribute... attributes); + + /** + * Return true if the cell timestamp needs to be supplemented. + * @return true if the cell timestamp needs to be supplemented + */ + boolean supplementCellTimestamp(); } \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java similarity index 95% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java index 452adcdcb4..c5c8cb46a5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java @@ -22,7 +22,7 @@ * * @param refers to the table for which this column family is used for. */ -public interface ColumnFamily { +public interface ColumnFamily> { /** * Keep a local copy if you need to avoid overhead of repeated cloning. 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java new file mode 100644 index 0000000000..b173ef20d3 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java @@ -0,0 +1,101 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import org.apache.hadoop.hbase.util.Bytes; + +/** + * This class is meant to be used only by explicit Columns, and not directly to + * write by clients. + */ +public final class ColumnHelper { + + private ColumnHelper() { + } + + + /** + * @param columnPrefixBytes The byte representation for the column prefix. + * Should not contain {@link Separator#QUALIFIERS}. + * @param qualifier for the remainder of the column. + * {@link Separator#QUALIFIERS} is permissible in the qualifier + * as it is joined only with the column prefix bytes. + * @return fully sanitized column qualifier that is a combination of prefix + * and qualifier. If prefix is null, the result is simply the encoded + * qualifier without any separator. + */ + public static byte[] getColumnQualifier(byte[] columnPrefixBytes, + String qualifier) { + + // We don't want column names to have spaces / tabs. + byte[] encodedQualifier = + Separator.encode(qualifier, Separator.SPACE, Separator.TAB); + if (columnPrefixBytes == null) { + return encodedQualifier; + } + + // Convert qualifier to lower case, strip of separators and tag on column + // prefix. + byte[] columnQualifier = + Separator.QUALIFIERS.join(columnPrefixBytes, encodedQualifier); + return columnQualifier; + } + + /** + * @param columnPrefixBytes The byte representation for the column prefix. + * Should not contain {@link Separator#QUALIFIERS}. + * @param qualifier for the remainder of the column. + * @return fully sanitized column qualifier that is a combination of prefix + * and qualifier. If prefix is null, the result is simply the encoded + * qualifier without any separator. 
+ */ + public static byte[] getColumnQualifier(byte[] columnPrefixBytes, + long qualifier) { + + if (columnPrefixBytes == null) { + return Bytes.toBytes(qualifier); + } + + // Convert qualifier to lower case, strip of separators and tag on column + // prefix. + byte[] columnQualifier = + Separator.QUALIFIERS.join(columnPrefixBytes, Bytes.toBytes(qualifier)); + return columnQualifier; + } + + /** + * @param columnPrefixBytes The byte representation for the column prefix. + * Should not contain {@link Separator#QUALIFIERS}. + * @param qualifier the byte representation for the remainder of the column. + * @return fully sanitized column qualifier that is a combination of prefix + * and qualifier. If prefix is null, the result is simply the encoded + * qualifier without any separator. + */ + public static byte[] getColumnQualifier(byte[] columnPrefixBytes, + byte[] qualifier) { + + if (columnPrefixBytes == null) { + return qualifier; + } + + byte[] columnQualifier = + Separator.QUALIFIERS.join(columnPrefixBytes, qualifier); + return columnQualifier; + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java new file mode 100644 index 0000000000..cbcd936304 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java @@ -0,0 +1,71 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; + +/** + * Used to represent a partially qualified column, where the actual column name + * will be composed of a prefix and the remainder of the column qualifier. The + * prefix can be null, in which case the column qualifier will be completely + * determined when the values are stored. + */ +public interface ColumnPrefix> { + + /** + * @param qualifierPrefix Column qualifier or prefix of qualifier. + * @return a byte array encoding column prefix and qualifier/prefix passed. + */ + byte[] getColumnPrefixBytes(String qualifierPrefix); + + /** + * @param qualifierPrefix Column qualifier or prefix of qualifier. + * @return a byte array encoding column prefix and qualifier/prefix passed. 
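All three getColumnQualifier overloads follow the same rule: a null prefix yields just the encoded qualifier, otherwise the prefix is joined to the qualifier with Separator.QUALIFIERS. A small worked example, assuming QUALIFIERS encodes as "!" as in the table diagrams earlier in this patch:

    // "e" + "eventId" -> bytes for "e!eventId"
    byte[] prefixed = ColumnHelper.getColumnQualifier(Bytes.toBytes("e"), "eventId");
    // null prefix -> just the encoded qualifier, no separator
    byte[] plain = ColumnHelper.getColumnQualifier(null, "eventId");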
+ */ + byte[] getColumnPrefixBytes(byte[] qualifierPrefix); + + /** + * Get the column prefix in bytes. + * @return column prefix in bytes + */ + byte[] getColumnPrefixInBytes(); + + /** + * Returns column family name(as bytes) associated with this column prefix. + * @return a byte array encoding column family for this prefix. + */ + byte[] getColumnFamilyBytes(); + + /** + * Returns value converter implementation associated with this column prefix. + * @return a {@link ValueConverter} implementation. + */ + ValueConverter getValueConverter(); + + /** + * Return attributed combined with aggregations, if any. + * @return an array of Attributes + */ + Attribute[] getCombinedAttrsWithAggr(Attribute... attributes); + + /** + * Return true if the cell timestamp needs to be supplemented. + * @return true if the cell timestamp needs to be supplemented + */ + boolean supplementCellTimeStamp(); +} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnName.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnName.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnName.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnName.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnNameConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnNameConverter.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnNameConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/EventColumnNameConverter.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/GenericConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/GenericConverter.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/GenericConverter.java rename to 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/GenericConverter.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineSchemaUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineSchemaUtils.java new file mode 100644 index 0000000000..e5f92cccc4 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineSchemaUtils.java @@ -0,0 +1,156 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; + +import java.text.NumberFormat; + +/** + * A bunch of utility functions used in HBase TimelineService common module. + */ +public final class HBaseTimelineSchemaUtils { + /** milliseconds in one day. */ + public static final long MILLIS_ONE_DAY = 86400000L; + + private static final ThreadLocal APP_ID_FORMAT = + new ThreadLocal() { + @Override + public NumberFormat initialValue() { + NumberFormat fmt = NumberFormat.getInstance(); + fmt.setGroupingUsed(false); + fmt.setMinimumIntegerDigits(4); + return fmt; + } + }; + + private HBaseTimelineSchemaUtils() { + } + + /** + * Combines the input array of attributes and the input aggregation operation + * into a new array of attributes. + * + * @param attributes Attributes to be combined. + * @param aggOp Aggregation operation. + * @return array of combined attributes. 
+ */ + public static Attribute[] combineAttributes(Attribute[] attributes, + AggregationOperation aggOp) { + int newLength = getNewLengthCombinedAttributes(attributes, aggOp); + Attribute[] combinedAttributes = new Attribute[newLength]; + + if (attributes != null) { + System.arraycopy(attributes, 0, combinedAttributes, 0, attributes.length); + } + + if (aggOp != null) { + Attribute a2 = aggOp.getAttribute(); + combinedAttributes[newLength - 1] = a2; + } + return combinedAttributes; + } + + /** + * Returns a number for the new array size. The new array is the combination + * of input array of attributes and the input aggregation operation. + * + * @param attributes Attributes. + * @param aggOp Aggregation operation. + * @return the size for the new array + */ + private static int getNewLengthCombinedAttributes(Attribute[] attributes, + AggregationOperation aggOp) { + int oldLength = getAttributesLength(attributes); + int aggLength = getAppOpLength(aggOp); + return oldLength + aggLength; + } + + private static int getAppOpLength(AggregationOperation aggOp) { + if (aggOp != null) { + return 1; + } + return 0; + } + + private static int getAttributesLength(Attribute[] attributes) { + if (attributes != null) { + return attributes.length; + } + return 0; + } + + /** + * Converts an int into it's inverse int to be used in (row) keys + * where we want to have the largest int value in the top of the table + * (scans start at the largest int first). + * + * @param key value to be inverted so that the latest version will be first in + * a scan. + * @return inverted int + */ + public static int invertInt(int key) { + return Integer.MAX_VALUE - key; + } + + /** + * returns the timestamp of that day's start (which is midnight 00:00:00 AM) + * for a given input timestamp. + * + * @param ts Timestamp. + * @return timestamp of that day's beginning (midnight) + */ + public static long getTopOfTheDayTimestamp(long ts) { + long dayTimestamp = ts - (ts % MILLIS_ONE_DAY); + return dayTimestamp; + } + + /** + * Checks if passed object is of integral type(Short/Integer/Long). + * + * @param obj Object to be checked. + * @return true if object passed is of type Short or Integer or Long, false + * otherwise. + */ + public static boolean isIntegralValue(Object obj) { + return (obj instanceof Short) || (obj instanceof Integer) || + (obj instanceof Long); + } + + /** + * A utility method that converts ApplicationId to string without using + * FastNumberFormat in order to avoid the incompatibility issue caused + * by mixing hadoop-common 2.5.1 and hadoop-yarn-api 3.0 in this module. + * This is a work-around implementation as discussed in YARN-6905. 
+ * + * @param appId application id + * @return the string representation of the given application id + * + */ + public static String convertApplicationIdToString(ApplicationId appId) { + StringBuilder sb = new StringBuilder(64); + sb.append(ApplicationId.appIdStrPrefix); + sb.append("_"); + sb.append(appId.getClusterTimestamp()); + sb.append('_'); + sb.append(APP_ID_FORMAT.get().format(appId.getId())); + return sb.toString(); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverter.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverter.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverterToString.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverterToString.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverterToString.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/KeyConverterToString.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java similarity index 97% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java index 6ab69f7de1..0857980a20 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java +++ 
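Together these helpers determine how keys sort and render: invertInt makes larger values produce smaller keys so the newest entries come first in a scan, getTopOfTheDayTimestamp buckets a timestamp to its day boundary, and convertApplicationIdToString formats an application id without FastNumberFormat (the YARN-6905 work-around described above). A small worked example with hypothetical inputs:

    // Larger ids invert to smaller keys, so the latest entries sort first.
    int inverted = HBaseTimelineSchemaUtils.invertInt(7); // Integer.MAX_VALUE - 7
    // Start-of-day (midnight) boundary for an arbitrary timestamp.
    long dayStart = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(1392993084018L);
    // Renders as "application_1111111111111_0002".
    String appIdStr = HBaseTimelineSchemaUtils.convertApplicationIdToString(
        ApplicationId.newInstance(1111111111111L, 2));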
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java @@ -40,7 +40,7 @@ public LongConverter() { @Override public byte[] encodeValue(Object value) throws IOException { - if (!HBaseTimelineStorageUtils.isIntegralValue(value)) { + if (!HBaseTimelineSchemaUtils.isIntegralValue(value)) { throw new IOException("Expected integral value"); } return Bytes.toBytes(((Number)value).longValue()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongKeyConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongKeyConverter.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongKeyConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongKeyConverter.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/RowKeyPrefix.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/RowKeyPrefix.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/RowKeyPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/RowKeyPrefix.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/StringKeyConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/StringKeyConverter.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/StringKeyConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/StringKeyConverter.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java new file mode 100644 index 0000000000..0df5b8af84 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.common contains + * a set of utility classes used across backend storage reader and writer. 
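A minimal, hypothetical sketch of the encode-side guard that the LongConverter hunk above now delegates to HBaseTimelineSchemaUtils.isIntegralValue; it assumes only the HBase Bytes utility that the surrounding code already imports, and the class and method names here are illustrative.

    import java.io.IOException;
    import org.apache.hadoop.hbase.util.Bytes;

    public class LongEncodeSketch {
      // Reject anything that is not a Short, Integer or Long before encoding,
      // mirroring HBaseTimelineSchemaUtils.isIntegralValue.
      static byte[] encodeLong(Object value) throws IOException {
        if (!(value instanceof Short) && !(value instanceof Integer)
            && !(value instanceof Long)) {
          throw new IOException("Expected integral value");
        }
        return Bytes.toBytes(((Number) value).longValue());
      }
    }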
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java similarity index 79% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java index b228d84806..81961d3e1c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java @@ -17,17 +17,12 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.entity; -import java.io.IOException; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; @@ -56,10 +51,10 @@ public enum EntityColumn implements Column { */ FLOW_VERSION(EntityColumnFamily.INFO, "flow_version"); - private final ColumnHelper column; private final ColumnFamily columnFamily; private final String columnQualifier; private final byte[] columnQualifierBytes; + private final ValueConverter valueConverter; EntityColumn(ColumnFamily columnFamily, String columnQualifier) { @@ -73,7 +68,7 @@ public enum EntityColumn implements Column { // Future-proof by ensuring the right column prefix hygiene. this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE.encode(columnQualifier)); - this.column = new ColumnHelper(columnFamily, converter); + this.valueConverter = converter; } /** @@ -83,17 +78,6 @@ private String getColumnQualifier() { return columnQualifier; } - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, Long timestamp, - Object inputValue, Attribute... 
attributes) throws IOException { - column.store(rowKey, tableMutator, columnQualifierBytes, timestamp, - inputValue, attributes); - } - - public Object readResult(Result result) throws IOException { - return column.readResult(result, columnQualifierBytes); - } - @Override public byte[] getColumnQualifierBytes() { return columnQualifierBytes.clone(); @@ -106,7 +90,16 @@ public byte[] getColumnFamilyBytes() { @Override public ValueConverter getValueConverter() { - return column.getValueConverter(); + return valueConverter; } + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return attributes; + } + + @Override + public boolean supplementCellTimestamp() { + return false; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java similarity index 56% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java index d3851089c8..08234d3972 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java @@ -17,20 +17,13 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.entity; -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; 
import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; @@ -69,7 +62,6 @@ public enum EntityColumnPrefix implements ColumnPrefix { */ METRIC(EntityColumnFamily.METRICS, null, new LongConverter()); - private final ColumnHelper column; private final ColumnFamily columnFamily; /** @@ -78,6 +70,7 @@ public enum EntityColumnPrefix implements ColumnPrefix { */ private final String columnPrefix; private final byte[] columnPrefixBytes; + private final ValueConverter valueConverter; /** * Private constructor, meant to be used by the enum definition. @@ -111,7 +104,7 @@ public enum EntityColumnPrefix implements ColumnPrefix { */ EntityColumnPrefix(ColumnFamily columnFamily, String columnPrefix, boolean compondColQual, ValueConverter converter) { - column = new ColumnHelper(columnFamily, converter); + this.valueConverter = converter; this.columnFamily = columnFamily; this.columnPrefix = columnPrefix; if (columnPrefix == null) { @@ -142,6 +135,11 @@ public byte[] getColumnPrefixBytes(String qualifierPrefix) { this.columnPrefixBytes, qualifierPrefix); } + @Override + public byte[] getColumnPrefixInBytes() { + return columnPrefixBytes != null ? columnPrefixBytes.clone() : null; + } + @Override public byte[] getColumnFamilyBytes() { return columnFamily.getBytes(); @@ -149,101 +147,16 @@ public byte[] getColumnFamilyBytes() { @Override public ValueConverter getValueConverter() { - return column.getValueConverter(); + return valueConverter; } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object, - * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[]) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, String qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return attributes; } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, byte[] qualifier, - Long timestamp, Object inputValue, Attribute... 
attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); + @Override + public boolean supplementCellTimeStamp() { + return false; } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResult(org.apache.hadoop.hbase.client.Result, java.lang.String) - */ - public Object readResult(Result result, String qualifier) throws IOException { - byte[] columnQualifier = - ColumnHelper.getColumnQualifier(this.columnPrefixBytes, qualifier); - return column.readResult(result, columnQualifier); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResults(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public Map readResults(Result result, - KeyConverter keyConverter) throws IOException { - return column.readResults(result, columnPrefixBytes, keyConverter); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public NavigableMap> - readResultsWithTimestamps(Result result, KeyConverter keyConverter) - throws IOException { - return column.readResultsWithTimestamps(result, columnPrefixBytes, - keyConverter); - } - } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKeyPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKeyPrefix.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKeyPrefix.java rename to 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKeyPrefix.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java new file mode 100644 index 0000000000..dceeb99b8a --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java @@ -0,0 +1,61 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.entity; + +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; + +/** + * The entity table has column families info, config and metrics. Info stores + * information about a timeline entity object, config stores configuration data + * of a timeline entity object, and metrics stores the metrics of a timeline + * entity object. + * + * Example entity table record: + * + * <pre>
+ * |-------------------------------------------------------------------------|
+ * |  Row       | Column Family                | Column Family| Column Family|
+ * |  key       | info                         | metrics      | config       |
+ * |-------------------------------------------------------------------------|
+ * | userName!  | id:entityId                  | metricId1:   | configKey1:  |
+ * | clusterId! |                              | metricValue1 | configValue1 |
+ * | flowName!  | type:entityType              | @timestamp1  |              |
+ * | flowRunId! |                              |              | configKey2:  |
+ * | AppId!     | created_time:                | metricId1:   | configValue2 |
+ * | entityType!| 1392993084018                | metricValue2 |              |
+ * | idPrefix!  |                              | @timestamp2  |              |
+ * | entityId   | i!infoKey:                   |              |              |
+ * |            | infoValue                    | metricId1:   |              |
+ * |            |                              | metricValue1 |              |
+ * |            | r!relatesToKey:              | @timestamp2  |              |
+ * |            | id3=id4=id5                  |              |              |
+ * |            |                              |              |              |
+ * |            | s!isRelatedToKey             |              |              |
+ * |            | id7=id9=id6                  |              |              |
+ * |            |                              |              |              |
+ * |            | e!eventId=timestamp=infoKey: |              |              |
+ * |            | eventInfoValue               |              |              |
+ * |            |                              |              |              |
+ * |            | flowVersion:                 |              |              |
+ * |            | versionValue                 |              |              |
+ * |-------------------------------------------------------------------------|
+ * </pre>
+ */ +public final class EntityTable extends BaseTable { +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java new file mode 100644 index 0000000000..bb0e33133e --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.entity + * contains classes related to implementation for entity table. 
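As the EntityColumn and EntityColumnPrefix hunks above show, the refactor removes the embedded store()/readResult() methods: a column enum now only describes schema (family, qualifier bytes, value converter, aggregation attributes, timestamp supplementing), and the actual HBase reads and writes move to ColumnRWHelper in the client submodule. A rough, hypothetical sketch of that schema-only shape follows; SimpleColumn and InfoColumn are illustrative names, not classes from the patch.

    // Hypothetical, simplified shape of a schema-only column description after the refactor.
    import java.nio.charset.StandardCharsets;

    interface SimpleColumn {
      byte[] getColumnQualifierBytes();
      boolean supplementCellTimestamp();
    }

    enum InfoColumn implements SimpleColumn {
      ID("id"),
      TYPE("type");

      private final byte[] qualifierBytes;

      InfoColumn(String qualifier) {
        this.qualifierBytes = qualifier.getBytes(StandardCharsets.UTF_8);
      }

      @Override
      public byte[] getColumnQualifierBytes() {
        // Defensive copy, as the enums in the patch do.
        return qualifierBytes.clone();
      }

      @Override
      public boolean supplementCellTimestamp() {
        // Only flow run columns ask the writer to supplement cell timestamps.
        return false;
      }
    }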
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.entity; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java similarity index 100% rename from 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java new file mode 100644 index 0000000000..f468f0b40c --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java @@ -0,0 +1,133 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timelineservice.storage.flow; + +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; + +/** + * Identifies partially qualified columns for the {@link FlowActivityTable}. + */ +public enum FlowActivityColumnPrefix + implements ColumnPrefix { + + /** + * To store run ids of the flows. + */ + RUN_ID(FlowActivityColumnFamily.INFO, "r", null); + + private final ColumnFamily columnFamily; + private final ValueConverter valueConverter; + + /** + * Can be null for those cases where the provided column qualifier is the + * entire column name. + */ + private final String columnPrefix; + private final byte[] columnPrefixBytes; + + private final AggregationOperation aggOp; + + /** + * Private constructor, meant to be used by the enum definition. + * + * @param columnFamily + * that this column is stored in. 
+ * @param columnPrefix + * for this column. + */ + private FlowActivityColumnPrefix( + ColumnFamily columnFamily, String columnPrefix, + AggregationOperation aggOp) { + this(columnFamily, columnPrefix, aggOp, false); + } + + private FlowActivityColumnPrefix( + ColumnFamily columnFamily, String columnPrefix, + AggregationOperation aggOp, boolean compoundColQual) { + this.valueConverter = GenericConverter.getInstance(); + this.columnFamily = columnFamily; + this.columnPrefix = columnPrefix; + if (columnPrefix == null) { + this.columnPrefixBytes = null; + } else { + // Future-proof by ensuring the right column prefix hygiene. + this.columnPrefixBytes = Bytes.toBytes(Separator.SPACE + .encode(columnPrefix)); + } + this.aggOp = aggOp; + } + + /** + * @return the column name value + */ + public String getColumnPrefix() { + return columnPrefix; + } + + @Override + public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) { + return ColumnHelper.getColumnQualifier( + this.columnPrefixBytes, qualifierPrefix); + } + + @Override + public byte[] getColumnPrefixBytes(String qualifierPrefix) { + return ColumnHelper.getColumnQualifier( + this.columnPrefixBytes, qualifierPrefix); + } + + public byte[] getColumnPrefixBytes() { + return columnPrefixBytes.clone(); + } + + @Override + public byte[] getColumnFamilyBytes() { + return columnFamily.getBytes(); + } + + @Override + public byte[] getColumnPrefixInBytes() { + return columnPrefixBytes != null ? columnPrefixBytes.clone() : null; + } + + @Override + public ValueConverter getValueConverter() { + return valueConverter; + } + + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return HBaseTimelineSchemaUtils.combineAttributes(attributes, aggOp); + } + + @Override + public boolean supplementCellTimeStamp() { + return false; + } + + public AggregationOperation getAttribute() { + return aggOp; + } +} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java similarity index 98% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java index b8a5dba6cd..747f6abfa2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java @@ -21,7 +21,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderUtils; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; 
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverterToString; @@ -63,7 +63,7 @@ protected FlowActivityRowKey(String clusterId, Long timestamp, String userId, String flowName, boolean convertDayTsToTopOfDay) { this.clusterId = clusterId; if (convertDayTsToTopOfDay && (timestamp != null)) { - this.dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(timestamp); + this.dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(timestamp); } else { this.dayTs = timestamp; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java new file mode 100644 index 0000000000..e88a2fc66a --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.flow; + +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; + +/** + * The flow activity table has column family info. + * It stores the daily activity record for flows and + * is useful as a quick lookup of what flows were + * running on a given day. + * + * Example flow activity table record: + * + * <pre>
+ * |-------------------------------------------|
+ * |  Row key   | Column Family                |
+ * |            | info                         |
+ * |-------------------------------------------|
+ * | clusterId! | r!runid1:version1            |
+ * | inv Top of |                              |
+ * | Day!       | r!runid2:version7            |
+ * | userName!  |                              |
+ * | flowName   |                              |
+ * |-------------------------------------------|
+ * </pre>
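A conceptual, hypothetical sketch of how the flow activity row key described above is assembled; the real implementation uses FlowActivityRowKey with the Separator and KeyConverter machinery from the common package, and the cluster, user and flow names here are made up.

    // Conceptual sketch only; not the actual FlowActivityRowKey encoding.
    public class FlowActivityKeySketch {
      public static void main(String[] args) {
        long eventTs = 1392993084018L;
        long topOfDay = eventTs - (eventTs % 86400000L);
        // The day timestamp is stored inverted so that the most recent day scans first.
        long invertedDay = Long.MAX_VALUE - topOfDay;
        String readableKey = "cluster1!" + invertedDay + "!user1!flow_hive_report";
        System.out.println(readableKey);
      }
    }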
+ */ +public final class FlowActivityTable extends BaseTable { +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java similarity index 72% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java index 3797fafedc..2132d0470e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java @@ -17,18 +17,13 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.flow; -import java.io.IOException; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; /** @@ -55,11 +50,11 @@ AggregationOperation.GLOBAL_MAX, new LongConverter()), */ FLOW_VERSION(FlowRunColumnFamily.INFO, "flow_version", null); - private final ColumnHelper column; private final ColumnFamily columnFamily; private final String columnQualifier; private final byte[] columnQualifierBytes; private final AggregationOperation aggOp; + private final ValueConverter valueConverter; private FlowRunColumn(ColumnFamily columnFamily, String columnQualifier, AggregationOperation aggOp) { @@ -76,7 +71,7 @@ private FlowRunColumn(ColumnFamily columnFamily, // Future-proof by ensuring the right column prefix hygiene. 
this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE .encode(columnQualifier)); - this.column = new ColumnHelper(columnFamily, converter, true); + this.valueConverter = converter; } /** @@ -100,32 +95,18 @@ public AggregationOperation getAggregationOperation() { return aggOp; } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.Column#store - * (byte[], org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.Long, java.lang.Object, - * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[]) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, Long timestamp, - Object inputValue, Attribute... attributes) throws IOException { - - Attribute[] combinedAttributes = - HBaseTimelineStorageUtils.combineAttributes(attributes, aggOp); - column.store(rowKey, tableMutator, columnQualifierBytes, timestamp, - inputValue, combinedAttributes); - } - - public Object readResult(Result result) throws IOException { - return column.readResult(result, columnQualifierBytes); + @Override + public ValueConverter getValueConverter() { + return valueConverter; } @Override - public ValueConverter getValueConverter() { - return column.getValueConverter(); + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return HBaseTimelineSchemaUtils.combineAttributes(attributes, aggOp); } + @Override + public boolean supplementCellTimestamp() { + return true; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnFamily.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnFamily.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnFamily.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java new file mode 100644 index 0000000000..cc06bb4526 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java @@ -0,0 +1,129 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.timelineservice.storage.flow; + +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; + +/** + * Identifies partially qualified columns for the {@link FlowRunTable}. + */ +public enum FlowRunColumnPrefix implements ColumnPrefix { + + /** + * To store flow run info values. + */ + METRIC(FlowRunColumnFamily.INFO, "m", null, new LongConverter()); + + private final ColumnFamily columnFamily; + + /** + * Can be null for those cases where the provided column qualifier is the + * entire column name. + */ + private final String columnPrefix; + private final byte[] columnPrefixBytes; + private final ValueConverter valueConverter; + + private final AggregationOperation aggOp; + + /** + * Private constructor, meant to be used by the enum definition. + * + * @param columnFamily that this column is stored in. + * @param columnPrefix for this column. + */ + private FlowRunColumnPrefix(ColumnFamily columnFamily, + String columnPrefix, AggregationOperation fra, ValueConverter converter) { + this(columnFamily, columnPrefix, fra, converter, false); + } + + private FlowRunColumnPrefix(ColumnFamily columnFamily, + String columnPrefix, AggregationOperation fra, ValueConverter converter, + boolean compoundColQual) { + this.valueConverter = converter; + this.columnFamily = columnFamily; + this.columnPrefix = columnPrefix; + if (columnPrefix == null) { + this.columnPrefixBytes = null; + } else { + // Future-proof by ensuring the right column prefix hygiene. + this.columnPrefixBytes = + Bytes.toBytes(Separator.SPACE.encode(columnPrefix)); + } + this.aggOp = fra; + } + + /** + * @return the column name value + */ + public String getColumnPrefix() { + return columnPrefix; + } + + public byte[] getColumnPrefixBytes() { + return columnPrefixBytes.clone(); + } + + @Override + public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) { + return ColumnHelper.getColumnQualifier(this.columnPrefixBytes, + qualifierPrefix); + } + + @Override + public byte[] getColumnPrefixBytes(String qualifierPrefix) { + return ColumnHelper.getColumnQualifier(this.columnPrefixBytes, + qualifierPrefix); + } + + @Override + public byte[] getColumnFamilyBytes() { + return columnFamily.getBytes(); + } + + @Override + public byte[] getColumnPrefixInBytes() { + return columnPrefixBytes != null ? 
columnPrefixBytes.clone() : null; + } + + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return HBaseTimelineSchemaUtils.combineAttributes(attributes, aggOp); + } + + @Override + public boolean supplementCellTimeStamp() { + return true; + } + + public AggregationOperation getAttribute() { + return aggOp; + } + + @Override + public ValueConverter getValueConverter() { + return valueConverter; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKeyPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKeyPrefix.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKeyPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKeyPrefix.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java new file mode 100644 index 0000000000..643a102b19 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java @@ -0,0 +1,77 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.flow; + +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; + +/** + * The flow run table has column family info + * Stores per flow run information + * aggregated across applications. + * + * Metrics are also stored in the info column family. + * + * Example flow run table record: + * + *
+ * flow_run table
+ * |-------------------------------------------|
+ * |  Row key   | Column Family                |
+ * |            | info                         |
+ * |-------------------------------------------|
+ * | clusterId! | flow_version:version7        |
+ * | userName!  |                              |
+ * | flowName!  | running_apps:1               |
+ * | flowRunId  |                              |
+ * |            | min_start_time:1392995080000 |
+ * |            | #0:""                        |
+ * |            |                              |
+ * |            | min_start_time:1392995081012 |
+ * |            | #0:appId2                    |
+ * |            |                              |
+ * |            | min_start_time:1392993083210 |
+ * |            | #0:appId3                    |
+ * |            |                              |
+ * |            |                              |
+ * |            | max_end_time:1392993084018   |
+ * |            | #0:""                        |
+ * |            |                              |
+ * |            |                              |
+ * |            | m!mapInputRecords:127        |
+ * |            | #0:""                        |
+ * |            |                              |
+ * |            | m!mapInputRecords:31         |
+ * |            | #2:appId2                    |
+ * |            |                              |
+ * |            | m!mapInputRecords:37         |
+ * |            | #1:appId3                    |
+ * |            |                              |
+ * |            |                              |
+ * |            | m!mapOutputRecords:181       |
+ * |            | #0:""                        |
+ * |            |                              |
+ * |            | m!mapOutputRecords:37        |
+ * |            | #1:appId3                    |
+ * |            |                              |
+ * |            |                              |
+ * |-------------------------------------------|
+ * 
+ */ +public final class FlowRunTable extends BaseTable { +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java new file mode 100644 index 0000000000..04963f3f1d --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.flow + * contains classes related to implementation for flow related tables, viz. flow + * run table and flow activity table. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.flow; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java new file mode 100644 index 0000000000..e78db2a1ef --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
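For orientation, here is a minimal, illustrative sketch of how the metric qualifiers in the table above are formed. It assumes the METRIC prefix ("m") defined on FlowRunColumnPrefix earlier in this patch and a hypothetical metric id; the separator and encoding are handled by the schema helpers, so the printed value is roughly "m!mapInputRecords", matching the metric cells shown above.

    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnPrefix;

    public final class FlowRunMetricQualifierSketch {
      public static void main(String[] args) {
        // Hypothetical metric id; in practice it comes from a TimelineMetric.
        String metricId = "mapInputRecords";
        // Joins the METRIC prefix ("m") and the encoded metric id with the
        // qualifier separator, yielding a qualifier like "m!mapInputRecords".
        byte[] qualifier =
            FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(metricId);
        System.out.println(Bytes.toString(qualifier));
      }
    }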
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage contains + * classes which define and implement reading and writing to backend storage. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumn.java similarity index 78% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumn.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumn.java index 46b0cc90e4..a011a3ff62 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumn.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumn.java @@ -17,17 +17,12 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.subapplication; -import java.io.IOException; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; @@ -57,10 +52,10 @@ public enum SubApplicationColumn implements Column { */ FLOW_VERSION(SubApplicationColumnFamily.INFO, "flow_version"); - private final ColumnHelper column; private final ColumnFamily columnFamily; private final String columnQualifier; private final byte[] columnQualifierBytes; 
+ private final ValueConverter valueConverter; SubApplicationColumn(ColumnFamily columnFamily, String columnQualifier) { @@ -74,20 +69,7 @@ public enum SubApplicationColumn implements Column { // Future-proof by ensuring the right column prefix hygiene. this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE.encode(columnQualifier)); - this.column = new ColumnHelper(columnFamily, - converter); - } - - - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, Long timestamp, - Object inputValue, Attribute... attributes) throws IOException { - column.store(rowKey, tableMutator, columnQualifierBytes, timestamp, - inputValue, attributes); - } - - public Object readResult(Result result) throws IOException { - return column.readResult(result, columnQualifierBytes); + this.valueConverter = converter; } @Override @@ -102,7 +84,16 @@ public byte[] getColumnFamilyBytes() { @Override public ValueConverter getValueConverter() { - return column.getValueConverter(); + return valueConverter; } + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return attributes; + } + + @Override + public boolean supplementCellTimestamp() { + return false; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnFamily.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnFamily.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnFamily.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java similarity index 57% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java index 06eccedf3a..1106e37464 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java @@ -17,20 +17,13 @@ */ package org.apache.hadoop.yarn.server.timelineservice.storage.subapplication; -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; @@ -70,7 +63,6 @@ public enum SubApplicationColumnPrefix */ METRIC(SubApplicationColumnFamily.METRICS, null, new LongConverter()); - private final ColumnHelper column; private final ColumnFamily columnFamily; /** @@ -79,6 +71,7 @@ public enum SubApplicationColumnPrefix */ private final String columnPrefix; private final byte[] columnPrefixBytes; + private final ValueConverter valueConverter; /** * Private constructor, meant to be used by the enum definition. @@ -112,7 +105,7 @@ public enum SubApplicationColumnPrefix */ SubApplicationColumnPrefix(ColumnFamily columnFamily, String columnPrefix, boolean compondColQual, ValueConverter converter) { - column = new ColumnHelper(columnFamily, converter); + this.valueConverter = converter; this.columnFamily = columnFamily; this.columnPrefix = columnPrefix; if (columnPrefix == null) { @@ -143,6 +136,11 @@ public byte[] getColumnPrefixBytes(String qualifierPrefix) { this.columnPrefixBytes, qualifierPrefix); } + @Override + public byte[] getColumnPrefixInBytes() { + return columnPrefixBytes != null ? columnPrefixBytes.clone() : null; + } + @Override public byte[] getColumnFamilyBytes() { return columnFamily.getBytes(); @@ -150,101 +148,16 @@ public byte[] getColumnFamilyBytes() { @Override public ValueConverter getValueConverter() { - return column.getValueConverter(); + return valueConverter; } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object, - * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[]) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, String qualifier, - Long timestamp, Object inputValue, Attribute... 
attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); + @Override + public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) { + return attributes; } - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, byte[] qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); + @Override + public boolean supplementCellTimeStamp() { + return false; } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResult(org.apache.hadoop.hbase.client.Result, java.lang.String) - */ - public Object readResult(Result result, String qualifier) throws IOException { - byte[] columnQualifier = - ColumnHelper.getColumnQualifier(this.columnPrefixBytes, qualifier); - return column.readResult(result, columnQualifier); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResults(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public Map readResults(Result result, - KeyConverter keyConverter) throws IOException { - return column.readResults(result, columnPrefixBytes, keyConverter); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public NavigableMap> - readResultsWithTimestamps(Result result, KeyConverter keyConverter) - throws IOException { - return column.readResultsWithTimestamps(result, columnPrefixBytes, - keyConverter); - } - } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKey.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKey.java rename to 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKey.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKeyPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKeyPrefix.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKeyPrefix.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKeyPrefix.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java new file mode 100644 index 0000000000..de7dd4df9b --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java @@ -0,0 +1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.subapplication; + +import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; + +/** + * The sub application table has column families: + * info, config and metrics. + * Info stores information about a timeline entity object + * config stores configuration data of a timeline entity object + * metrics stores the metrics of a timeline entity object + * + * Example sub application table record: + * + *
+ * |-------------------------------------------------------------------------|
+ * |  Row          | Column Family             | Column Family| Column Family|
+ * |  key          | info                      | metrics      | config       |
+ * |-------------------------------------------------------------------------|
+ * | subAppUserId! | id:entityId               | metricId1:   | configKey1:  |
+ * | clusterId!    | type:entityType           | metricValue1 | configValue1 |
+ * | entityType!   |                           | @timestamp1  |              |
+ * | idPrefix!     |                           |              | configKey2:  |
+ * | entityId!     | created_time:             | metricId1:   | configValue2 |
+ * | userId        | 1392993084018             | metricValue2 |              |
+ * |               |                           | @timestamp2  |              |
+ * |               | i!infoKey:                |              |              |
+ * |               | infoValue                 | metricId1:   |              |
+ * |               |                           | metricValue1 |              |
+ * |               |                           | @timestamp2  |              |
+ * |               | e!eventId=timestamp=      |              |              |
+ * |               | infoKey:                  |              |              |
+ * |               | eventInfoValue            |              |              |
+ * |               |                           |              |              |
+ * |               | r!relatesToKey:           |              |              |
+ * |               | id3=id4=id5               |              |              |
+ * |               |                           |              |              |
+ * |               | s!isRelatedToKey          |              |              |
+ * |               | id7=id9=id6               |              |              |
+ * |               |                           |              |              |
+ * |               | flowVersion:              |              |              |
+ * |               | versionValue              |              |              |
+ * |-------------------------------------------------------------------------|
+ * 
+ */ +public final class SubApplicationTable extends BaseTable { +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java new file mode 100644 index 0000000000..52cc39964d --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.subapplication + * contains classes related to implementation for subapplication table. 
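To make the row-key layout above concrete, the following purely illustrative snippet mirrors the documented component ordering with plain string joining and placeholder values; the real key is built by the sub-application row-key class with Separator-encoded, typed components rather than literal "!" concatenation.

    import java.util.StringJoiner;

    public final class SubApplicationRowKeyLayoutSketch {
      public static void main(String[] args) {
        // Placeholder values; ordering follows the table above:
        // subAppUserId!clusterId!entityType!idPrefix!entityId!userId
        StringJoiner key = new StringJoiner("!");
        key.add("subAppUser1")   // subAppUserId
           .add("cluster1")      // clusterId
           .add("entityType1")   // entityType
           .add("0")             // idPrefix
           .add("entityId1")     // entityId
           .add("user1");        // userId
        System.out.println(key); // subAppUser1!cluster1!entityType1!0!entityId1!user1
      }
    }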
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.subapplication; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestCustomApplicationIdConversion.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestCustomApplicationIdConversion.java similarity index 95% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestCustomApplicationIdConversion.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestCustomApplicationIdConversion.java index 73bc29efc4..0dc344fdcd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestCustomApplicationIdConversion.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestCustomApplicationIdConversion.java @@ -32,7 +32,7 @@ public class TestCustomApplicationIdConversion { public void testConvertAplicationIdToString() { ApplicationId applicationId = ApplicationId.newInstance(0, 1); String applicationIdStr = - HBaseTimelineStorageUtils.convertApplicationIdToString(applicationId); + HBaseTimelineSchemaUtils.convertApplicationIdToString(applicationId); Assert.assertEquals(applicationId, ApplicationId.fromString(applicationIdStr)); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java similarity index 99% 
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java index 47702383ee..d05cbad29d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java @@ -195,7 +195,7 @@ public void testEntityRowKey() { @Test public void testFlowActivityRowKey() { Long ts = 1459900830000L; - Long dayTimestamp = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts); + Long dayTimestamp = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(ts); byte[] byteRowKey = new FlowActivityRowKey(CLUSTER, ts, USER, FLOW_NAME).getRowKey(); FlowActivityRowKey rowKey = FlowActivityRowKey.parseRowKey(byteRowKey); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeysAsString.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeysAsString.java similarity index 98% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeysAsString.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeysAsString.java index 148cf567c4..c4d07c7c30 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeysAsString.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeysAsString.java @@ -94,7 +94,7 @@ public void testEntityRowKey() { @Test(timeout = 10000) public void testFlowActivityRowKey() { Long ts = 1459900830000L; - Long dayTimestamp = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts); + Long dayTimestamp = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(ts); String rowKeyAsString = new FlowActivityRowKey(CLUSTER, ts, USER, FLOW_NAME) .getRowKeyAsString(); FlowActivityRowKey rowKey = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/pom.xml new file mode 100644 index 0000000000..d06907d5d9 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/pom.xml @@ -0,0 +1,161 @@ + + + + + hadoop-yarn-server-timelineservice-hbase + org.apache.hadoop + 3.2.0-SNAPSHOT + + + 4.0.0 + hadoop-yarn-server-timelineservice-hbase-server + Apache Hadoop YARN TimelineService HBase Server + 3.2.0-SNAPSHOT + + + + ${project.parent.parent.parent.basedir} + + + + + commons-logging + commons-logging + + + + com.google.guava + guava + + + + org.apache.hadoop + hadoop-common + provided + + + + org.apache.hadoop + hadoop-yarn-api + provided + + + + org.apache.hadoop + hadoop-yarn-server-timelineservice-hbase-common + + + + org.apache.hbase + hbase-common + provided + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.mortbay.jetty + jetty-util + + + + + + org.apache.hbase + hbase-client + provided + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + + + + org.apache.hbase + hbase-server + provided + + + org.apache.hadoop + hadoop-hdfs + + + org.apache.hadoop + hadoop-hdfs-client + + + org.apache.hadoop + hadoop-client + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + + + org.mortbay.jetty + jetty-sslengine + + + + + + + + + maven-assembly-plugin + + src/assembly/coprocessor.xml + true + + + + create-coprocessor-jar + prepare-package + + single + + + + + + maven-jar-plugin + + + + test-jar + + + + + + + \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/assembly/coprocessor.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/assembly/coprocessor.xml new file mode 100644 index 0000000000..01ff0dd063 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/assembly/coprocessor.xml @@ -0,0 +1,37 @@ + + + coprocessor + + jar + + false + + + / + true + true + runtime + + org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-common + org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-server + + + + \ No newline at end of file diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineServerUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineServerUtils.java new file mode 100644 index 0000000000..5c07670a62 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineServerUtils.java @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * A utility class used by hbase-server module. + */ +public final class HBaseTimelineServerUtils { + private HBaseTimelineServerUtils() { + } + + /** + * Creates a {@link Tag} from the input attribute. + * + * @param attribute Attribute from which tag has to be fetched. + * @return a HBase Tag. + */ + public static Tag getTagFromAttribute(Map.Entry attribute) { + // attribute could be either an Aggregation Operation or + // an Aggregation Dimension + // Get the Tag type from either + AggregationOperation aggOp = AggregationOperation + .getAggregationOperation(attribute.getKey()); + if (aggOp != null) { + Tag t = new Tag(aggOp.getTagType(), attribute.getValue()); + return t; + } + + AggregationCompactionDimension aggCompactDim = + AggregationCompactionDimension.getAggregationCompactionDimension( + attribute.getKey()); + if (aggCompactDim != null) { + Tag t = new Tag(aggCompactDim.getTagType(), attribute.getValue()); + return t; + } + return null; + } + + /** + * creates a new cell based on the input cell but with the new value. + * + * @param origCell Original cell + * @param newValue new cell value + * @return cell + * @throws IOException while creating new cell. 
+ */ + public static Cell createNewCell(Cell origCell, byte[] newValue) + throws IOException { + return CellUtil.createCell(CellUtil.cloneRow(origCell), + CellUtil.cloneFamily(origCell), CellUtil.cloneQualifier(origCell), + origCell.getTimestamp(), KeyValue.Type.Put.getCode(), newValue); + } + + /** + * creates a cell with the given inputs. + * + * @param row row of the cell to be created + * @param family column family name of the new cell + * @param qualifier qualifier for the new cell + * @param ts timestamp of the new cell + * @param newValue value of the new cell + * @param tags tags in the new cell + * @return cell + * @throws IOException while creating the cell. + */ + public static Cell createNewCell(byte[] row, byte[] family, byte[] qualifier, + long ts, byte[] newValue, byte[] tags) throws IOException { + return CellUtil.createCell(row, family, qualifier, ts, KeyValue.Type.Put, + newValue, tags); + } + + /** + * returns app id from the list of tags. + * + * @param tags cell tags to be looked into + * @return App Id as the AggregationCompactionDimension + */ + public static String getAggregationCompactionDimension(List tags) { + String appId = null; + for (Tag t : tags) { + if (AggregationCompactionDimension.APPLICATION_ID.getTagType() == t + .getType()) { + appId = Bytes.toString(t.getValue()); + return appId; + } + } + return appId; + } + + /** + * Returns the first seen aggregation operation as seen in the list of input + * tags or null otherwise. + * + * @param tags list of HBase tags. + * @return AggregationOperation + */ + public static AggregationOperation getAggregationOperationFromTagsList( + List tags) { + for (AggregationOperation aggOp : AggregationOperation.values()) { + for (Tag tag : tags) { + if (tag.getType() == aggOp.getTagType()) { + return aggOp; + } + } + } + return null; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java new file mode 100644 index 0000000000..0df5b8af84 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
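As a rough usage sketch of the helpers above (with hypothetical row key, application id and metric values), the snippet below tags a metric cell with its application id, replaces its value the way the flow run summation does, and then recovers the application id from the original cell's tags. It relies only on the method signatures introduced in this file and the HBase Tag/Cell calls already used by the coprocessor code in this patch.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineServerUtils;
    import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension;

    public final class ServerUtilsSketch {
      public static void main(String[] args) throws Exception {
        // Tag a cell with the application id it came from (hypothetical value).
        List<Tag> tags = new ArrayList<>();
        tags.add(new Tag(
            AggregationCompactionDimension.APPLICATION_ID.getTagType(),
            Bytes.toBytes("application_1234567890123_0001")));

        Cell original = HBaseTimelineServerUtils.createNewCell(
            Bytes.toBytes("someRowKey"), Bytes.toBytes("i"),
            Bytes.toBytes("m!mapInputRecords"), System.currentTimeMillis(),
            Bytes.toBytes(127L), Tag.fromList(tags));

        // Replace the value while keeping row, family, qualifier and timestamp,
        // as the flow run summation does; note this copy does not carry tags.
        Cell summed =
            HBaseTimelineServerUtils.createNewCell(original, Bytes.toBytes(158L));

        // Recover the application id from the original cell's tags.
        String appId = HBaseTimelineServerUtils.getAggregationCompactionDimension(
            Tag.asList(original.getTagsArray(), original.getTagsOffset(),
                original.getTagsLength()));
        System.out.println(appId + " -> " + Bytes.toLong(
            summed.getValueArray(), summed.getValueOffset(), summed.getValueLength()));
      }
    }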
+ */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.common contains + * a set of utility classes used across backend storage reader and writer. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java similarity index 99% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java index 96a7cf3d96..41a371b522 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineServerUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -97,7 +97,7 @@ public void prePut(ObserverContext e, Put put, List tags = new ArrayList<>(); if ((attributes != null) && (attributes.size() > 0)) { for (Map.Entry attribute : attributes.entrySet()) { - Tag t = HBaseTimelineStorageUtils.getTagFromAttribute(attribute); + Tag t = HBaseTimelineServerUtils.getTagFromAttribute(attribute); if (t != null) { tags.add(t); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java similarity index 98% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java rename to 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java index dbd04843eb..7f09e518ce 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineServerUtils; import org.apache.hadoop.yarn.server.timelineservice.storage.common.NumericValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator; @@ -250,17 +250,11 @@ private AggregationOperation getCurrentAggOp(Cell cell) { List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); // We assume that all the operations for a particular column are the same - return HBaseTimelineStorageUtils.getAggregationOperationFromTagsList(tags); + return HBaseTimelineServerUtils.getAggregationOperationFromTagsList(tags); } /** * resets the parameters to an initialized state for next loop iteration. 
- * - * @param cell - * @param currentAggOp - * @param currentColumnCells - * @param alreadySeenAggDim - * @param collectedButNotEmitted */ private void resetState(SortedSet currentColumnCells, Set alreadySeenAggDim) { @@ -324,7 +318,7 @@ private void collectCells(SortedSet currentColumnCells, // only if this app has not been seen yet, add to current column cells List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); - String aggDim = HBaseTimelineStorageUtils + String aggDim = HBaseTimelineServerUtils .getAggregationCompactionDimension(tags); if (!alreadySeenAggDim.contains(aggDim)) { // if this agg dimension has already been seen, @@ -420,7 +414,7 @@ private Cell processSummation(SortedSet currentColumnCells, } byte[] sumBytes = converter.encodeValue(sum); Cell sumCell = - HBaseTimelineStorageUtils.createNewCell(mostRecentCell, sumBytes); + HBaseTimelineServerUtils.createNewCell(mostRecentCell, sumBytes); return sumCell; } @@ -462,7 +456,7 @@ List processSummationMajorCompaction( // if this is the existing flow sum cell List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); - String appId = HBaseTimelineStorageUtils + String appId = HBaseTimelineServerUtils .getAggregationCompactionDimension(tags); if (appId == FLOW_APP_ID) { sum = converter.add(sum, currentValue); @@ -504,7 +498,7 @@ List processSummationMajorCompaction( Bytes.toBytes(FLOW_APP_ID)); tags.add(t); byte[] tagByteArray = Tag.fromList(tags); - Cell sumCell = HBaseTimelineStorageUtils.createNewCell( + Cell sumCell = HBaseTimelineServerUtils.createNewCell( CellUtil.cloneRow(anyCell), CellUtil.cloneFamily(anyCell), CellUtil.cloneQualifier(anyCell), diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScannerOperation.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScannerOperation.java similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScannerOperation.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScannerOperation.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java new file mode 100644 index 0000000000..04963f3f1d --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage.flow + * contains classes related to implementation for flow related tables, viz. flow + * run table and flow activity table. + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage.flow; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java new file mode 100644 index 0000000000..e78db2a1ef --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.yarn.server.timelineservice.storage contains + * classes which define and implement reading and writing to backend storage. 
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +package org.apache.hadoop.yarn.server.timelineservice.storage; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/pom.xml index 6369864e1a..7e5a803e78 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/pom.xml @@ -18,197 +18,24 @@ + http://maven.apache.org/xsd/maven-4.0.0.xsd"> hadoop-yarn-server org.apache.hadoop 3.2.0-SNAPSHOT + 4.0.0 hadoop-yarn-server-timelineservice-hbase + 3.2.0-SNAPSHOT Apache Hadoop YARN TimelineService HBase Backend + pom - - - ${project.parent.parent.basedir} - + + hadoop-yarn-server-timelineservice-hbase-client + hadoop-yarn-server-timelineservice-hbase-common + hadoop-yarn-server-timelineservice-hbase-server + - - - commons-logging - commons-logging - - - commons-lang - commons-lang - - - - commons-cli - commons-cli - - - - com.google.guava - guava - - - - org.apache.hadoop - hadoop-annotations - provided - - - - org.apache.hadoop - hadoop-common - provided - - - - - org.apache.hadoop - hadoop-common - test-jar - test - - - - org.apache.hadoop - hadoop-yarn-api - provided - - - - org.apache.hadoop - hadoop-yarn-common - provided - - - - org.apache.hadoop - hadoop-yarn-server-applicationhistoryservice - provided - - - - org.apache.hadoop - hadoop-yarn-server-timelineservice - - - - org.apache.hbase - hbase-common - - - org.apache.hadoop - hadoop-mapreduce-client-core - - - org.mortbay.jetty - jetty-util - - - - - - org.apache.hbase - hbase-client - - - org.apache.hadoop - hadoop-mapreduce-client-core - - - - - - org.apache.hbase - hbase-server - provided - - - org.apache.hadoop - hadoop-hdfs - - - org.apache.hadoop - hadoop-hdfs-client - - - org.apache.hadoop - hadoop-client - - - org.apache.hadoop - hadoop-mapreduce-client-core - - - org.mortbay.jetty - jetty - - - org.mortbay.jetty - jetty-util - - - org.mortbay.jetty - jetty-sslengine - - - - - - junit - junit - test - - - - - - - maven-jar-plugin - - - - test-jar - - test-compile - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - - junit - junit - 4.11 - - - - - - org.apache.maven.plugins - maven-dependency-plugin - - - package - - copy-dependencies - - - runtime - org.slf4j,org.apache.hadoop,com.github.stephenc.findbugs - ${project.build.directory}/lib - - - - - - - + \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java deleted file mode 100644 index 752a3803fe..0000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnPrefix.java +++ /dev/null @@ -1,206 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow; - -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; - -/** - * Identifies partially qualified columns for the app-to-flow table. - */ -public enum AppToFlowColumnPrefix implements ColumnPrefix { - - /** - * The flow name. - */ - FLOW_NAME(AppToFlowColumnFamily.MAPPING, "flow_name"), - - /** - * The flow run ID. - */ - FLOW_RUN_ID(AppToFlowColumnFamily.MAPPING, "flow_run_id"), - - /** - * The user. - */ - USER_ID(AppToFlowColumnFamily.MAPPING, "user_id"); - - private final ColumnHelper column; - private final ColumnFamily columnFamily; - private final String columnPrefix; - private final byte[] columnPrefixBytes; - - AppToFlowColumnPrefix(ColumnFamily columnFamily, - String columnPrefix) { - this.columnFamily = columnFamily; - this.columnPrefix = columnPrefix; - if (columnPrefix == null) { - this.columnPrefixBytes = null; - } else { - // Future-proof by ensuring the right column prefix hygiene. - this.columnPrefixBytes = - Bytes.toBytes(Separator.SPACE.encode(columnPrefix)); - } - this.column = new ColumnHelper(columnFamily); - } - - @Override - public byte[] getColumnPrefixBytes(String qualifierPrefix) { - return ColumnHelper.getColumnQualifier( - columnPrefixBytes, qualifierPrefix); - } - - @Override - public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) { - return ColumnHelper.getColumnQualifier( - columnPrefixBytes, qualifierPrefix); - } - - @Override - public byte[] getColumnFamilyBytes() { - return columnFamily.getBytes(); - } - - @Override - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, byte[] qualifier, - Long timestamp, Object inputValue, Attribute... 
attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); - } - - @Override - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, String qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - attributes); - } - - @Override - public ValueConverter getValueConverter() { - return column.getValueConverter(); - } - - @Override - public Object readResult(Result result, String qualifier) throws IOException { - byte[] columnQualifier = - ColumnHelper.getColumnQualifier(columnPrefixBytes, qualifier); - return column.readResult(result, columnQualifier); - } - - @Override - public Map readResults(Result result, - KeyConverter keyConverter) - throws IOException { - return column.readResults(result, columnPrefixBytes, keyConverter); - } - - @Override - public NavigableMap> - readResultsWithTimestamps(Result result, - KeyConverter keyConverter) throws IOException { - return column.readResultsWithTimestamps(result, columnPrefixBytes, - keyConverter); - } - - /** - * Retrieve an {@link AppToFlowColumnPrefix} given a name, or null if there - * is no match. The following holds true: {@code columnFor(x) == columnFor(y)} - * if and only if {@code x.equals(y)} or {@code (x == y == null)} - * - * @param columnPrefix Name of the column to retrieve - * @return the corresponding {@link AppToFlowColumnPrefix} or null - */ - public static final AppToFlowColumnPrefix columnFor(String columnPrefix) { - - // Match column based on value, assume column family matches. - for (AppToFlowColumnPrefix afcp : AppToFlowColumnPrefix.values()) { - // Find a match based only on name. - if (afcp.columnPrefix.equals(columnPrefix)) { - return afcp; - } - } - - // Default to null - return null; - } - - /** - * Retrieve an {@link AppToFlowColumnPrefix} given a name, or null if there - * is no match. The following holds true: - * {@code columnFor(a,x) == columnFor(b,y)} if and only if - * {@code (x == y == null)} or {@code a.equals(b) & x.equals(y)} - * - * @param columnFamily The columnFamily for which to retrieve the column. - * @param columnPrefix Name of the column to retrieve - * @return the corresponding {@link AppToFlowColumnPrefix} or null if both - * arguments don't match. - */ - public static final AppToFlowColumnPrefix columnFor( - AppToFlowColumnFamily columnFamily, String columnPrefix) { - - // TODO: needs unit test to confirm and need to update javadoc to explain - // null prefix case. - - for (AppToFlowColumnPrefix afcp : AppToFlowColumnPrefix.values()) { - // Find a match based column family and on name. 
- if (afcp.columnFamily.equals(columnFamily) - && (((columnPrefix == null) && (afcp.columnPrefix == null)) || - (afcp.columnPrefix.equals(columnPrefix)))) { - return afcp; - } - } - - // Default to null - return null; - } -} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java deleted file mode 100644 index 89aa013a1b..0000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java +++ /dev/null @@ -1,145 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.common; - -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; - -/** - * Used to represent a partially qualified column, where the actual column name - * will be composed of a prefix and the remainder of the column qualifier. The - * prefix can be null, in which case the column qualifier will be completely - * determined when the values are stored. - */ -public interface ColumnPrefix { - - /** - * Sends a Mutation to the table. The mutations will be buffered and sent over - * the wire as part of a batch. - * - * @param rowKey identifying the row to write. Nothing gets written when null. - * @param tableMutator used to modify the underlying HBase table. Caller is - * responsible to pass a mutator for the table that actually has this - * column. - * @param qualifier column qualifier. Nothing gets written when null. - * @param timestamp version timestamp. When null the server timestamp will be - * used. - * @param attributes attributes for the mutation that are used by the - * coprocessor to set/read the cell tags. - * @param inputValue the value to write to the rowKey and column qualifier. - * Nothing gets written when null. - * @throws IOException if there is any exception encountered while doing - * store operation(sending mutation to the table). - */ - void store(byte[] rowKey, TypedBufferedMutator tableMutator, - byte[] qualifier, Long timestamp, Object inputValue, - Attribute... attributes) throws IOException; - - /** - * Sends a Mutation to the table. The mutations will be buffered and sent over - * the wire as part of a batch. - * - * @param rowKey identifying the row to write. 
Nothing gets written when null. - * @param tableMutator used to modify the underlying HBase table. Caller is - * responsible to pass a mutator for the table that actually has this - * column. - * @param qualifier column qualifier. Nothing gets written when null. - * @param timestamp version timestamp. When null the server timestamp will be - * used. - * @param attributes attributes for the mutation that are used by the - * coprocessor to set/read the cell tags. - * @param inputValue the value to write to the rowKey and column qualifier. - * Nothing gets written when null. - * @throws IOException if there is any exception encountered while doing - * store operation(sending mutation to the table). - */ - void store(byte[] rowKey, TypedBufferedMutator tableMutator, - String qualifier, Long timestamp, Object inputValue, - Attribute... attributes) throws IOException; - - /** - * Get the latest version of this specified column. Note: this call clones the - * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}. - * - * @param result Cannot be null - * @param qualifier column qualifier. Nothing gets read when null. - * @return result object (can be cast to whatever object was written to) or - * null when specified column qualifier for this prefix doesn't exist - * in the result. - * @throws IOException if there is any exception encountered while reading - * result. - */ - Object readResult(Result result, String qualifier) throws IOException; - - /** - * - * @param identifies the type of key converter. - * @param result from which to read columns. - * @param keyConverter used to convert column bytes to the appropriate key - * type - * @return the latest values of columns in the column family with this prefix - * (or all of them if the prefix value is null). - * @throws IOException if there is any exception encountered while reading - * results. - */ - Map readResults(Result result, KeyConverter keyConverter) - throws IOException; - - /** - * @param result from which to reads data with timestamps. - * @param identifies the type of key converter. - * @param the type of the values. The values will be cast into that type. - * @param keyConverter used to convert column bytes to the appropriate key - * type. - * @return the cell values at each respective time in for form - * {@literal {idA={timestamp1->value1}, idA={timestamp2->value2}, - * idB={timestamp3->value3}, idC={timestamp1->value4}}} - * @throws IOException if there is any exception encountered while reading - * result. - */ - NavigableMap> readResultsWithTimestamps( - Result result, KeyConverter keyConverter) throws IOException; - - /** - * @param qualifierPrefix Column qualifier or prefix of qualifier. - * @return a byte array encoding column prefix and qualifier/prefix passed. - */ - byte[] getColumnPrefixBytes(String qualifierPrefix); - - /** - * @param qualifierPrefix Column qualifier or prefix of qualifier. - * @return a byte array encoding column prefix and qualifier/prefix passed. - */ - byte[] getColumnPrefixBytes(byte[] qualifierPrefix); - - /** - * Returns column family name(as bytes) associated with this column prefix. - * @return a byte array encoding column family for this prefix. - */ - byte[] getColumnFamilyBytes(); - - /** - * Returns value converter implementation associated with this column prefix. - * @return a {@link ValueConverter} implementation. 
- */ - ValueConverter getValueConverter(); -} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java deleted file mode 100644 index c115b18086..0000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java +++ /dev/null @@ -1,354 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ - -package org.apache.hadoop.yarn.server.timelineservice.storage.common; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.client.Query; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation; -import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.text.NumberFormat; - -/** - * A bunch of utility functions used in HBase TimelineService backend. - */ -public final class HBaseTimelineStorageUtils { - /** milliseconds in one day. */ - public static final long MILLIS_ONE_DAY = 86400000L; - private static final Logger LOG = - LoggerFactory.getLogger(HBaseTimelineStorageUtils.class); - - private HBaseTimelineStorageUtils() { - } - - - /** - * Combines the input array of attributes and the input aggregation operation - * into a new array of attributes. - * - * @param attributes Attributes to be combined. - * @param aggOp Aggregation operation. - * @return array of combined attributes. 
- */ - public static Attribute[] combineAttributes(Attribute[] attributes, - AggregationOperation aggOp) { - int newLength = getNewLengthCombinedAttributes(attributes, aggOp); - Attribute[] combinedAttributes = new Attribute[newLength]; - - if (attributes != null) { - System.arraycopy(attributes, 0, combinedAttributes, 0, attributes.length); - } - - if (aggOp != null) { - Attribute a2 = aggOp.getAttribute(); - combinedAttributes[newLength - 1] = a2; - } - return combinedAttributes; - } - - /** - * Returns a number for the new array size. The new array is the combination - * of input array of attributes and the input aggregation operation. - * - * @param attributes Attributes. - * @param aggOp Aggregation operation. - * @return the size for the new array - */ - private static int getNewLengthCombinedAttributes(Attribute[] attributes, - AggregationOperation aggOp) { - int oldLength = getAttributesLength(attributes); - int aggLength = getAppOpLength(aggOp); - return oldLength + aggLength; - } - - private static int getAppOpLength(AggregationOperation aggOp) { - if (aggOp != null) { - return 1; - } - return 0; - } - - private static int getAttributesLength(Attribute[] attributes) { - if (attributes != null) { - return attributes.length; - } - return 0; - } - - /** - * Returns the first seen aggregation operation as seen in the list of input - * tags or null otherwise. - * - * @param tags list of HBase tags. - * @return AggregationOperation - */ - public static AggregationOperation getAggregationOperationFromTagsList( - List tags) { - for (AggregationOperation aggOp : AggregationOperation.values()) { - for (Tag tag : tags) { - if (tag.getType() == aggOp.getTagType()) { - return aggOp; - } - } - } - return null; - } - - /** - * Creates a {@link Tag} from the input attribute. - * - * @param attribute Attribute from which tag has to be fetched. - * @return a HBase Tag. - */ - public static Tag getTagFromAttribute(Map.Entry attribute) { - // attribute could be either an Aggregation Operation or - // an Aggregation Dimension - // Get the Tag type from either - AggregationOperation aggOp = AggregationOperation - .getAggregationOperation(attribute.getKey()); - if (aggOp != null) { - Tag t = new Tag(aggOp.getTagType(), attribute.getValue()); - return t; - } - - AggregationCompactionDimension aggCompactDim = - AggregationCompactionDimension.getAggregationCompactionDimension( - attribute.getKey()); - if (aggCompactDim != null) { - Tag t = new Tag(aggCompactDim.getTagType(), attribute.getValue()); - return t; - } - return null; - } - - /** - * creates a new cell based on the input cell but with the new value. - * - * @param origCell Original cell - * @param newValue new cell value - * @return cell - * @throws IOException while creating new cell. - */ - public static Cell createNewCell(Cell origCell, byte[] newValue) - throws IOException { - return CellUtil.createCell(CellUtil.cloneRow(origCell), - CellUtil.cloneFamily(origCell), CellUtil.cloneQualifier(origCell), - origCell.getTimestamp(), KeyValue.Type.Put.getCode(), newValue); - } - - /** - * creates a cell with the given inputs. - * - * @param row row of the cell to be created - * @param family column family name of the new cell - * @param qualifier qualifier for the new cell - * @param ts timestamp of the new cell - * @param newValue value of the new cell - * @param tags tags in the new cell - * @return cell - * @throws IOException while creating the cell. 
- */ - public static Cell createNewCell(byte[] row, byte[] family, byte[] qualifier, - long ts, byte[] newValue, byte[] tags) throws IOException { - return CellUtil.createCell(row, family, qualifier, ts, KeyValue.Type.Put, - newValue, tags); - } - - /** - * returns app id from the list of tags. - * - * @param tags cell tags to be looked into - * @return App Id as the AggregationCompactionDimension - */ - public static String getAggregationCompactionDimension(List tags) { - String appId = null; - for (Tag t : tags) { - if (AggregationCompactionDimension.APPLICATION_ID.getTagType() == t - .getType()) { - appId = Bytes.toString(t.getValue()); - return appId; - } - } - return appId; - } - - /** - * Converts an int into it's inverse int to be used in (row) keys - * where we want to have the largest int value in the top of the table - * (scans start at the largest int first). - * - * @param key value to be inverted so that the latest version will be first in - * a scan. - * @return inverted int - */ - public static int invertInt(int key) { - return Integer.MAX_VALUE - key; - } - - /** - * returns the timestamp of that day's start (which is midnight 00:00:00 AM) - * for a given input timestamp. - * - * @param ts Timestamp. - * @return timestamp of that day's beginning (midnight) - */ - public static long getTopOfTheDayTimestamp(long ts) { - long dayTimestamp = ts - (ts % MILLIS_ONE_DAY); - return dayTimestamp; - } - - private static final ThreadLocal APP_ID_FORMAT = - new ThreadLocal() { - @Override - public NumberFormat initialValue() { - NumberFormat fmt = NumberFormat.getInstance(); - fmt.setGroupingUsed(false); - fmt.setMinimumIntegerDigits(4); - return fmt; - } - }; - - /** - * A utility method that converts ApplicationId to string without using - * FastNumberFormat in order to avoid the incompatibility issue caused - * by mixing hadoop-common 2.5.1 and hadoop-yarn-api 3.0 in this module. - * This is a work-around implementation as discussed in YARN-6905. - * - * @param appId application id - * @return the string representation of the given application id - * - */ - public static String convertApplicationIdToString(ApplicationId appId) { - StringBuilder sb = new StringBuilder(64); - sb.append(ApplicationId.appIdStrPrefix); - sb.append("_"); - sb.append(appId.getClusterTimestamp()); - sb.append('_'); - sb.append(APP_ID_FORMAT.get().format(appId.getId())); - return sb.toString(); - } - - /** - * @param conf YARN configuration. Used to see if there is an explicit config - * pointing to the HBase config file to read. It should not be null - * or a NullPointerException will be thrown. - * @return a configuration with the HBase configuration from the classpath, - * optionally overwritten by the timeline service configuration URL if - * specified. - * @throws MalformedURLException if a timeline service HBase configuration URL - * is specified but is a malformed URL. 
- */ - public static Configuration getTimelineServiceHBaseConf(Configuration conf) - throws MalformedURLException { - if (conf == null) { - throw new NullPointerException(); - } - - Configuration hbaseConf; - String timelineServiceHBaseConfFileURL = - conf.get(YarnConfiguration.TIMELINE_SERVICE_HBASE_CONFIGURATION_FILE); - if (timelineServiceHBaseConfFileURL != null - && timelineServiceHBaseConfFileURL.length() > 0) { - LOG.info("Using hbase configuration at " + - timelineServiceHBaseConfFileURL); - // create a clone so that we don't mess with out input one - hbaseConf = new Configuration(conf); - Configuration plainHBaseConf = new Configuration(false); - URL hbaseSiteXML = new URL(timelineServiceHBaseConfFileURL); - plainHBaseConf.addResource(hbaseSiteXML); - HBaseConfiguration.merge(hbaseConf, plainHBaseConf); - } else { - // default to what is on the classpath - hbaseConf = HBaseConfiguration.create(conf); - } - return hbaseConf; - } - - /** - * Given a row key prefix stored in a byte array, return a byte array for its - * immediate next row key. - * - * @param rowKeyPrefix The provided row key prefix, represented in an array. - * @return the closest next row key of the provided row key. - */ - public static byte[] calculateTheClosestNextRowKeyForPrefix( - byte[] rowKeyPrefix) { - // Essentially we are treating it like an 'unsigned very very long' and - // doing +1 manually. - // Search for the place where the trailing 0xFFs start - int offset = rowKeyPrefix.length; - while (offset > 0) { - if (rowKeyPrefix[offset - 1] != (byte) 0xFF) { - break; - } - offset--; - } - - if (offset == 0) { - // We got an 0xFFFF... (only FFs) stopRow value which is - // the last possible prefix before the end of the table. - // So set it to stop at the 'end of the table' - return HConstants.EMPTY_END_ROW; - } - - // Copy the right length of the original - byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset); - // And increment the last one - newStopRow[newStopRow.length - 1]++; - return newStopRow; - } - - /** - * Checks if passed object is of integral type(Short/Integer/Long). - * - * @param obj Object to be checked. - * @return true if object passed is of type Short or Integer or Long, false - * otherwise. - */ - public static boolean isIntegralValue(Object obj) { - return (obj instanceof Short) || (obj instanceof Integer) || - (obj instanceof Long); - } - - public static void setMetricsTimeRange(Query query, byte[] metricsCf, - long tsBegin, long tsEnd) { - if (tsBegin != 0 || tsEnd != Long.MAX_VALUE) { - query.setColumnFamilyTimeRange(metricsCf, - tsBegin, ((tsEnd == Long.MAX_VALUE) ? Long.MAX_VALUE : (tsEnd + 1))); - } - } -} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java deleted file mode 100644 index 706b002f93..0000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java +++ /dev/null @@ -1,221 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; - -/** - * Identifies partially qualified columns for the {@link FlowActivityTable}. - */ -public enum FlowActivityColumnPrefix - implements ColumnPrefix { - - /** - * To store run ids of the flows. - */ - RUN_ID(FlowActivityColumnFamily.INFO, "r", null); - - private final ColumnHelper column; - private final ColumnFamily columnFamily; - - /** - * Can be null for those cases where the provided column qualifier is the - * entire column name. - */ - private final String columnPrefix; - private final byte[] columnPrefixBytes; - - private final AggregationOperation aggOp; - - /** - * Private constructor, meant to be used by the enum definition. - * - * @param columnFamily - * that this column is stored in. - * @param columnPrefix - * for this column. - */ - private FlowActivityColumnPrefix( - ColumnFamily columnFamily, String columnPrefix, - AggregationOperation aggOp) { - this(columnFamily, columnPrefix, aggOp, false); - } - - private FlowActivityColumnPrefix( - ColumnFamily columnFamily, String columnPrefix, - AggregationOperation aggOp, boolean compoundColQual) { - column = new ColumnHelper(columnFamily); - this.columnFamily = columnFamily; - this.columnPrefix = columnPrefix; - if (columnPrefix == null) { - this.columnPrefixBytes = null; - } else { - // Future-proof by ensuring the right column prefix hygiene. 
- this.columnPrefixBytes = Bytes.toBytes(Separator.SPACE - .encode(columnPrefix)); - } - this.aggOp = aggOp; - } - - /** - * @return the column name value - */ - public String getColumnPrefix() { - return columnPrefix; - } - - @Override - public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) { - return ColumnHelper.getColumnQualifier( - this.columnPrefixBytes, qualifierPrefix); - } - - @Override - public byte[] getColumnPrefixBytes(String qualifierPrefix) { - return ColumnHelper.getColumnQualifier( - this.columnPrefixBytes, qualifierPrefix); - } - - public byte[] getColumnPrefixBytes() { - return columnPrefixBytes.clone(); - } - - @Override - public byte[] getColumnFamilyBytes() { - return columnFamily.getBytes(); - } - - @Override - public ValueConverter getValueConverter() { - return column.getValueConverter(); - } - - public AggregationOperation getAttribute() { - return aggOp; - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, byte[], java.lang.Long, java.lang.Object, - * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[]) - */ - @Override - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, byte[] qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - Attribute[] combinedAttributes = - HBaseTimelineStorageUtils.combineAttributes(attributes, this.aggOp); - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - combinedAttributes); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResult(org.apache.hadoop.hbase.client.Result, java.lang.String) - */ - public Object readResult(Result result, String qualifier) throws IOException { - byte[] columnQualifier = ColumnHelper.getColumnQualifier( - this.columnPrefixBytes, qualifier); - return column.readResult(result, columnQualifier); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResults(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public Map readResults(Result result, - KeyConverter keyConverter) throws IOException { - return column.readResults(result, columnPrefixBytes, keyConverter); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public NavigableMap> - readResultsWithTimestamps(Result result, KeyConverter keyConverter) - throws IOException { - return column.readResultsWithTimestamps(result, columnPrefixBytes, - keyConverter); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. 
- * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object, - * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[]) - */ - @Override - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, String qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - Attribute[] combinedAttributes = - HBaseTimelineStorageUtils.combineAttributes(attributes, this.aggOp); - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - combinedAttributes); - } -} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java deleted file mode 100644 index f521cd7184..0000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java +++ /dev/null @@ -1,217 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; - -/** - * Identifies partially qualified columns for the {@link FlowRunTable}. - */ -public enum FlowRunColumnPrefix implements ColumnPrefix { - - /** - * To store flow run info values. 
- */ - METRIC(FlowRunColumnFamily.INFO, "m", null, new LongConverter()); - - private final ColumnHelper column; - private final ColumnFamily columnFamily; - - /** - * Can be null for those cases where the provided column qualifier is the - * entire column name. - */ - private final String columnPrefix; - private final byte[] columnPrefixBytes; - - private final AggregationOperation aggOp; - - /** - * Private constructor, meant to be used by the enum definition. - * - * @param columnFamily that this column is stored in. - * @param columnPrefix for this column. - */ - private FlowRunColumnPrefix(ColumnFamily columnFamily, - String columnPrefix, AggregationOperation fra, ValueConverter converter) { - this(columnFamily, columnPrefix, fra, converter, false); - } - - private FlowRunColumnPrefix(ColumnFamily columnFamily, - String columnPrefix, AggregationOperation fra, ValueConverter converter, - boolean compoundColQual) { - column = new ColumnHelper(columnFamily, converter, true); - this.columnFamily = columnFamily; - this.columnPrefix = columnPrefix; - if (columnPrefix == null) { - this.columnPrefixBytes = null; - } else { - // Future-proof by ensuring the right column prefix hygiene. - this.columnPrefixBytes = - Bytes.toBytes(Separator.SPACE.encode(columnPrefix)); - } - this.aggOp = fra; - } - - /** - * @return the column name value - */ - public String getColumnPrefix() { - return columnPrefix; - } - - public byte[] getColumnPrefixBytes() { - return columnPrefixBytes.clone(); - } - - @Override - public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) { - return ColumnHelper.getColumnQualifier(this.columnPrefixBytes, - qualifierPrefix); - } - - @Override - public byte[] getColumnPrefixBytes(String qualifierPrefix) { - return ColumnHelper.getColumnQualifier(this.columnPrefixBytes, - qualifierPrefix); - } - - @Override - public byte[] getColumnFamilyBytes() { - return columnFamily.getBytes(); - } - - public AggregationOperation getAttribute() { - return aggOp; - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, String qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - Attribute[] combinedAttributes = - HBaseTimelineStorageUtils.combineAttributes(attributes, this.aggOp); - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - combinedAttributes); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object) - */ - public void store(byte[] rowKey, - TypedBufferedMutator tableMutator, byte[] qualifier, - Long timestamp, Object inputValue, Attribute... 
attributes) - throws IOException { - - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - Attribute[] combinedAttributes = - HBaseTimelineStorageUtils.combineAttributes(attributes, this.aggOp); - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - combinedAttributes); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResult(org.apache.hadoop.hbase.client.Result, java.lang.String) - */ - public Object readResult(Result result, String qualifier) throws IOException { - byte[] columnQualifier = - ColumnHelper.getColumnQualifier(this.columnPrefixBytes, qualifier); - return column.readResult(result, columnQualifier); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResults(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public Map readResults(Result result, - KeyConverter keyConverter) throws IOException { - return column.readResults(result, columnPrefixBytes, keyConverter); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public NavigableMap> - readResultsWithTimestamps(Result result, KeyConverter keyConverter) - throws IOException { - return column.readResultsWithTimestamps(result, columnPrefixBytes, - keyConverter); - } - - @Override - public ValueConverter getValueConverter() { - return column.getValueConverter(); - } - -}
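
The deleted HBaseTimelineStorageUtils above includes calculateTheClosestNextRowKeyForPrefix, which derives an exclusive stop row for HBase prefix scans by treating the row-key prefix as an unsigned big-endian number and adding one. The following is a minimal, self-contained sketch of that computation, not code from the patch; the class name is hypothetical and an empty array stands in for HConstants.EMPTY_END_ROW.

import java.util.Arrays;

/** Standalone sketch of the stop-row ("closest next row key") logic. */
public final class NextRowKeySketch {

  private NextRowKeySketch() {
  }

  /**
   * Returns the smallest row key strictly greater than every key that starts
   * with the given prefix; an empty array means "scan to the end of the table"
   * (the role HConstants.EMPTY_END_ROW plays in the real code).
   */
  static byte[] closestNextRowKeyForPrefix(byte[] rowKeyPrefix) {
    int offset = rowKeyPrefix.length;
    // Skip trailing 0xFF bytes; they roll over when one is added.
    while (offset > 0 && rowKeyPrefix[offset - 1] == (byte) 0xFF) {
      offset--;
    }
    if (offset == 0) {
      // Prefix was all 0xFF: no larger key exists, so scan to end of table.
      return new byte[0];
    }
    // Keep the non-0xFF head of the prefix and increment its last byte.
    byte[] next = Arrays.copyOfRange(rowKeyPrefix, 0, offset);
    next[next.length - 1]++;
    return next;
  }

  public static void main(String[] args) {
    // {0x01, 0x02}       -> {0x01, 0x03}
    // {0x01, (byte)0xFF} -> {0x02}
    // {(byte)0xFF}       -> {}  (end of table)
    System.out.println(Arrays.toString(
        closestNextRowKeyForPrefix(new byte[] {0x01, 0x02})));
    System.out.println(Arrays.toString(
        closestNextRowKeyForPrefix(new byte[] {0x01, (byte) 0xFF})));
    System.out.println(Arrays.toString(
        closestNextRowKeyForPrefix(new byte[] {(byte) 0xFF})));
  }
}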
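
The same deleted utility class documents two ordering helpers: invertInt, which flips an int so that the largest original value sorts first in a forward scan, and getTopOfTheDayTimestamp, which truncates a timestamp to midnight of its day. The sketch below restates that arithmetic in isolation; the class name and example values are illustrative only.

/** Sketch of the row-key ordering helpers described in the removed class. */
public final class RowKeyOrderingSketch {

  /** Milliseconds in one day, matching MILLIS_ONE_DAY in the removed class. */
  static final long MILLIS_ONE_DAY = 86400000L;

  private RowKeyOrderingSketch() {
  }

  /** Larger inputs map to smaller outputs, so the most recent value scans first. */
  static int invertInt(int key) {
    return Integer.MAX_VALUE - key;
  }

  /** Drops the time-of-day component, keeping only the day boundary. */
  static long topOfTheDayTimestamp(long ts) {
    return ts - (ts % MILLIS_ONE_DAY);
  }

  public static void main(String[] args) {
    // true: after inversion, 7 sorts before 5 in a forward scan.
    System.out.println(invertInt(5) > invertInt(7));
    // Prints 1518739200000, the midnight preceding the input timestamp.
    System.out.println(topOfTheDayTimestamp(1_518_800_000_000L));
  }
}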
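
The deleted class also carries the YARN-6905 work-around for rendering an ApplicationId without FastNumberFormat. The self-contained sketch below reproduces the described format, application_&lt;clusterTimestamp&gt;_&lt;id zero-padded to at least four digits&gt;; the hard-coded "application" prefix stands in for ApplicationId.appIdStrPrefix and the class name is hypothetical.

import java.text.NumberFormat;

/** Sketch of the ApplicationId-to-string conversion described in the removed class. */
public final class AppIdStringSketch {

  // Per-thread formatter, mirroring the APP_ID_FORMAT ThreadLocal in the removed code.
  private static final ThreadLocal<NumberFormat> APP_ID_FORMAT =
      ThreadLocal.withInitial(() -> {
        NumberFormat fmt = NumberFormat.getInstance();
        fmt.setGroupingUsed(false);
        fmt.setMinimumIntegerDigits(4);
        return fmt;
      });

  private AppIdStringSketch() {
  }

  /** Builds the string form without depending on hadoop-yarn-api. */
  static String toAppIdString(long clusterTimestamp, int id) {
    return "application" + "_" + clusterTimestamp + '_'
        + APP_ID_FORMAT.get().format(id);
  }

  public static void main(String[] args) {
    // Prints: application_1518800000000_0007
    System.out.println(toAppIdString(1518800000000L, 7));
  }
}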