info : eventInfo.entrySet()) {
// eventId=infoKey
byte[] columnQualifierBytes =
new EventColumnName(eventId, eventTimestamp, info.getKey())
.getColumnQualifier();
- columnPrefix.store(rowKey, table, columnQualifierBytes, null,
- info.getValue());
+ ColumnRWHelper.store(rowKey, table, columnPrefix,
+ columnQualifierBytes, null, info.getValue());
} // for info: eventInfo
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
similarity index 92%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
index c9f7cecdf56..e9e47707294 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
@@ -37,13 +37,13 @@ import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
@@ -94,26 +94,27 @@ public final class TimelineSchemaCreator {
String entityTableName = commandLine.getOptionValue(
ENTITY_TABLE_NAME_SHORT);
if (StringUtils.isNotBlank(entityTableName)) {
- hbaseConf.set(EntityTable.TABLE_NAME_CONF_NAME, entityTableName);
+ hbaseConf.set(EntityTableRW.TABLE_NAME_CONF_NAME, entityTableName);
}
// Grab the entity metrics TTL
String entityTableMetricsTTL = commandLine.getOptionValue(
ENTITY_METRICS_TTL_OPTION_SHORT);
if (StringUtils.isNotBlank(entityTableMetricsTTL)) {
int entityMetricsTTL = Integer.parseInt(entityTableMetricsTTL);
- new EntityTable().setMetricsTTL(entityMetricsTTL, hbaseConf);
+ new EntityTableRW().setMetricsTTL(entityMetricsTTL, hbaseConf);
}
// Grab the appToflowTableName argument
String appToflowTableName = commandLine.getOptionValue(
APP_TO_FLOW_TABLE_NAME_SHORT);
if (StringUtils.isNotBlank(appToflowTableName)) {
- hbaseConf.set(AppToFlowTable.TABLE_NAME_CONF_NAME, appToflowTableName);
+ hbaseConf.set(
+ AppToFlowTableRW.TABLE_NAME_CONF_NAME, appToflowTableName);
}
// Grab the applicationTableName argument
String applicationTableName = commandLine.getOptionValue(
APP_TABLE_NAME_SHORT);
if (StringUtils.isNotBlank(applicationTableName)) {
- hbaseConf.set(ApplicationTable.TABLE_NAME_CONF_NAME,
+ hbaseConf.set(ApplicationTableRW.TABLE_NAME_CONF_NAME,
applicationTableName);
}
// Grab the application metrics TTL
@@ -121,14 +122,14 @@ public final class TimelineSchemaCreator {
APP_METRICS_TTL_OPTION_SHORT);
if (StringUtils.isNotBlank(applicationTableMetricsTTL)) {
int appMetricsTTL = Integer.parseInt(applicationTableMetricsTTL);
- new ApplicationTable().setMetricsTTL(appMetricsTTL, hbaseConf);
+ new ApplicationTableRW().setMetricsTTL(appMetricsTTL, hbaseConf);
}
// Grab the subApplicationTableName argument
String subApplicationTableName = commandLine.getOptionValue(
SUB_APP_TABLE_NAME_SHORT);
if (StringUtils.isNotBlank(subApplicationTableName)) {
- hbaseConf.set(SubApplicationTable.TABLE_NAME_CONF_NAME,
+ hbaseConf.set(SubApplicationTableRW.TABLE_NAME_CONF_NAME,
subApplicationTableName);
}
// Grab the subApplication metrics TTL
@@ -136,7 +137,7 @@ public final class TimelineSchemaCreator {
.getOptionValue(SUB_APP_METRICS_TTL_OPTION_SHORT);
if (StringUtils.isNotBlank(subApplicationTableMetricsTTL)) {
int subAppMetricsTTL = Integer.parseInt(subApplicationTableMetricsTTL);
- new SubApplicationTable().setMetricsTTL(subAppMetricsTTL, hbaseConf);
+ new SubApplicationTableRW().setMetricsTTL(subAppMetricsTTL, hbaseConf);
}
// create all table schemas in hbase
@@ -303,7 +304,7 @@ public final class TimelineSchemaCreator {
throw new IOException("Cannot create table since admin is null");
}
try {
- new EntityTable().createTable(admin, hbaseConf);
+ new EntityTableRW().createTable(admin, hbaseConf);
} catch (IOException e) {
if (skipExisting) {
LOG.warn("Skip and continue on: " + e.getMessage());
@@ -312,7 +313,7 @@ public final class TimelineSchemaCreator {
}
}
try {
- new AppToFlowTable().createTable(admin, hbaseConf);
+ new AppToFlowTableRW().createTable(admin, hbaseConf);
} catch (IOException e) {
if (skipExisting) {
LOG.warn("Skip and continue on: " + e.getMessage());
@@ -321,7 +322,7 @@ public final class TimelineSchemaCreator {
}
}
try {
- new ApplicationTable().createTable(admin, hbaseConf);
+ new ApplicationTableRW().createTable(admin, hbaseConf);
} catch (IOException e) {
if (skipExisting) {
LOG.warn("Skip and continue on: " + e.getMessage());
@@ -330,7 +331,7 @@ public final class TimelineSchemaCreator {
}
}
try {
- new FlowRunTable().createTable(admin, hbaseConf);
+ new FlowRunTableRW().createTable(admin, hbaseConf);
} catch (IOException e) {
if (skipExisting) {
LOG.warn("Skip and continue on: " + e.getMessage());
@@ -339,7 +340,7 @@ public final class TimelineSchemaCreator {
}
}
try {
- new FlowActivityTable().createTable(admin, hbaseConf);
+ new FlowActivityTableRW().createTable(admin, hbaseConf);
} catch (IOException e) {
if (skipExisting) {
LOG.warn("Skip and continue on: " + e.getMessage());
@@ -348,7 +349,7 @@ public final class TimelineSchemaCreator {
}
}
try {
- new SubApplicationTable().createTable(admin, hbaseConf);
+ new SubApplicationTableRW().createTable(admin, hbaseConf);
} catch (IOException e) {
if (skipExisting) {
LOG.warn("Skip and continue on: " + e.getMessage());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTableRW.java
similarity index 66%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTableRW.java
index 4da720e7a54..808994e3b0d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTableRW.java
@@ -26,48 +26,15 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * The application table as column families info, config and metrics. Info
- * stores information about a YARN application entity, config stores
- * configuration data of a YARN application, metrics stores the metrics of a
- * YARN application. This table is entirely analogous to the entity table but
- * created for better performance.
- *
- * Example application table record:
- *
- *
- * |-------------------------------------------------------------------------|
- * | Row | Column Family | Column Family| Column Family|
- * | key | info | metrics | config |
- * |-------------------------------------------------------------------------|
- * | clusterId! | id:appId | metricId1: | configKey1: |
- * | userName! | | metricValue1 | configValue1 |
- * | flowName! | created_time: | @timestamp1 | |
- * | flowRunId! | 1392993084018 | | configKey2: |
- * | AppId | | metriciD1: | configValue2 |
- * | | i!infoKey: | metricValue2 | |
- * | | infoValue | @timestamp2 | |
- * | | | | |
- * | | r!relatesToKey: | metricId2: | |
- * | | id3=id4=id5 | metricValue1 | |
- * | | | @timestamp2 | |
- * | | s!isRelatedToKey: | | |
- * | | id7=id9=id6 | | |
- * | | | | |
- * | | e!eventId=timestamp=infoKey: | | |
- * | | eventInfoValue | | |
- * | | | | |
- * | | flowVersion: | | |
- * | | versionValue | | |
- * |-------------------------------------------------------------------------|
- *
+ * Create, read and write to the Application Table.
*/
-public class ApplicationTable extends BaseTable<ApplicationTable> {
+public class ApplicationTableRW extends BaseTableRW<ApplicationTableRW> {
/** application prefix. */
private static final String PREFIX =
YarnConfiguration.TIMELINE_SERVICE_PREFIX + "application";
@@ -100,9 +67,9 @@ public class ApplicationTable extends BaseTable {
private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000;
private static final Logger LOG =
- LoggerFactory.getLogger(ApplicationTable.class);
+ LoggerFactory.getLogger(ApplicationTableRW.class);
- public ApplicationTable() {
+ public ApplicationTableRW() {
super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
}
@@ -110,8 +77,8 @@ public class ApplicationTable extends BaseTable {
* (non-Javadoc)
*
* @see
- * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
- * (org.apache.hadoop.hbase.client.Admin,
+ * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+ * createTable(org.apache.hadoop.hbase.client.Admin,
* org.apache.hadoop.conf.Configuration)
*/
public void createTable(Admin admin, Configuration hbaseConf)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTableRW.java
similarity index 67%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTableRW.java
index 04da5c70d40..6460203361e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTableRW.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -33,42 +33,9 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
- * The app_flow table as column families mapping. Mapping stores
- * appId to flowName and flowRunId mapping information
- *
- * Example app_flow table record:
- *
- *
- * |--------------------------------------|
- * | Row | Column Family |
- * | key | mapping |
- * |--------------------------------------|
- * | appId | flow_name!cluster1: |
- * | | foo@daily_hive_report |
- * | | |
- * | | flow_run_id!cluster1: |
- * | | 1452828720457 |
- * | | |
- * | | user_id!cluster1: |
- * | | admin |
- * | | |
- * | | flow_name!cluster2: |
- * | | bar@ad_hoc_query |
- * | | |
- * | | flow_run_id!cluster2: |
- * | | 1452828498752 |
- * | | |
- * | | user_id!cluster2: |
- * | | joe |
- * | | |
- * |--------------------------------------|
- *
- *
- * It is possible (although unlikely) in a multi-cluster environment that there
- * may be more than one applications for a given app id. Different clusters are
- * recorded as different sets of columns.
+ * Create, read and write to the AppToFlow Table.
*/
-public class AppToFlowTable extends BaseTable<AppToFlowTable> {
+public class AppToFlowTableRW extends BaseTableRW<AppToFlowTableRW> {
/** app_flow prefix. */
private static final String PREFIX =
YarnConfiguration.TIMELINE_SERVICE_PREFIX + "app-flow";
@@ -80,9 +47,9 @@ public class AppToFlowTable extends BaseTable {
private static final String DEFAULT_TABLE_NAME = "timelineservice.app_flow";
private static final Logger LOG =
- LoggerFactory.getLogger(AppToFlowTable.class);
+ LoggerFactory.getLogger(AppToFlowTableRW.class);
- public AppToFlowTable() {
+ public AppToFlowTableRW() {
super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
}
@@ -90,8 +57,8 @@ public class AppToFlowTable extends BaseTable {
* (non-Javadoc)
*
* @see
- * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
- * (org.apache.hadoop.hbase.client.Admin,
+ * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+ * createTable(org.apache.hadoop.hbase.client.Admin,
* org.apache.hadoop.conf.Configuration)
*/
public void createTable(Admin admin, Configuration hbaseConf)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTableRW.java
similarity index 96%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTableRW.java
index 93d809c003e..12ebce44a47 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTableRW.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
*
 * @param <T> reference to the table instance class itself for type safety.
*/
-public abstract class BaseTable<T extends BaseTable<T>> {
+public abstract class BaseTableRW<T extends BaseTableRW<T>> {
/**
* Name of config variable that is used to point to this table.
@@ -56,7 +56,7 @@ public abstract class BaseTable {
* @param defaultTableName Default table name if table from config is not
* found.
*/
- protected BaseTable(String tableNameConfName, String defaultTableName) {
+ protected BaseTableRW(String tableNameConfName, String defaultTableName) {
this.tableNameConfName = tableNameConfName;
this.defaultTableName = defaultTableName;
}
@@ -82,7 +82,7 @@ public abstract class BaseTable {
// This is how service initialization should hang on to this variable, with
// the proper type
     TypedBufferedMutator<T> table =
-        new BufferedMutatorDelegator<T>(bufferedMutator);
+        new TypedBufferedMutator<T>(bufferedMutator);
return table;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnRWHelper.java
similarity index 51%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnRWHelper.java
index 15c3f9ce04a..a8e5149c3a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnRWHelper.java
@@ -15,14 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.yarn.server.timelineservice.storage.common;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.NavigableMap;
-import java.util.TreeMap;
+package org.apache.hadoop.yarn.server.timelineservice.storage.common;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -32,109 +26,37 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.TreeMap;
+
/**
- * This class is meant to be used only by explicit Columns, and not directly to
- * write by clients.
- *
- * @param <T> refers to the table.
+ * A set of utility functions that read from or write to a column.
+ * This class is meant to be used only by explicit Columns,
+ * and not directly to write by clients.
*/
-public class ColumnHelper<T extends BaseTable<T>> {
+public final class ColumnRWHelper {
private static final Logger LOG =
LoggerFactory.getLogger(ColumnHelper.class);
-  private final ColumnFamily<T> columnFamily;
-
- /**
- * Local copy of bytes representation of columnFamily so that we can avoid
- * cloning a new copy over and over.
- */
- private final byte[] columnFamilyBytes;
-
- private final ValueConverter converter;
-
- private final boolean supplementTs;
-
-  public ColumnHelper(ColumnFamily<T> columnFamily) {
- this(columnFamily, GenericConverter.getInstance());
- }
-
-  public ColumnHelper(ColumnFamily<T> columnFamily, ValueConverter converter) {
- this(columnFamily, converter, false);
+ private ColumnRWHelper() {
}
/**
- * @param columnFamily column family implementation.
- * @param converter converter use to encode/decode values stored in the column
- * or column prefix.
- * @param needSupplementTs flag to indicate if cell timestamp needs to be
- * modified for this column by calling
- * {@link TimestampGenerator#getSupplementedTimestamp(long, String)}. This
- * would be required for columns(such as metrics in flow run table) where
- * potential collisions can occur due to same timestamp.
- */
-  public ColumnHelper(ColumnFamily<T> columnFamily, ValueConverter converter,
- boolean needSupplementTs) {
- this.columnFamily = columnFamily;
- columnFamilyBytes = columnFamily.getBytes();
- if (converter == null) {
- this.converter = GenericConverter.getInstance();
- } else {
- this.converter = converter;
- }
- this.supplementTs = needSupplementTs;
- }
-
- /**
- * Sends a Mutation to the table. The mutations will be buffered and sent over
- * the wire as part of a batch.
- *
- * @param rowKey
- * identifying the row to write. Nothing gets written when null.
- * @param tableMutator
- * used to modify the underlying HBase table
- * @param columnQualifier
- * column qualifier. Nothing gets written when null.
- * @param timestamp
- * version timestamp. When null the current timestamp multiplied with
- * TimestampGenerator.TS_MULTIPLIER and added with last 3 digits of
- * app id will be used
- * @param inputValue
- * the value to write to the rowKey and column qualifier. Nothing
- * gets written when null.
- * @param attributes Attributes to be set for HBase Put.
- * @throws IOException if any problem occurs during store operation(sending
- * mutation to table).
- */
-  public void store(byte[] rowKey, TypedBufferedMutator<?> tableMutator,
- byte[] columnQualifier, Long timestamp, Object inputValue,
- Attribute... attributes) throws IOException {
- if ((rowKey == null) || (columnQualifier == null) || (inputValue == null)) {
- return;
- }
- Put p = new Put(rowKey);
- timestamp = getPutTimestamp(timestamp, attributes);
- p.addColumn(columnFamilyBytes, columnQualifier, timestamp,
- converter.encodeValue(inputValue));
- if ((attributes != null) && (attributes.length > 0)) {
- for (Attribute attribute : attributes) {
- p.setAttribute(attribute.getName(), attribute.getValue());
- }
- }
- tableMutator.mutate(p);
- }
-
- /*
* Figures out the cell timestamp used in the Put For storing.
* Will supplement the timestamp if required. Typically done for flow run
* table.If we supplement the timestamp, we left shift the timestamp and
* supplement it with the AppId id so that there are no collisions in the flow
* run table's cells.
*/
- private long getPutTimestamp(Long timestamp, Attribute[] attributes) {
+ private static long getPutTimestamp(
+ Long timestamp, boolean supplementTs, Attribute[] attributes) {
if (timestamp == null) {
timestamp = System.currentTimeMillis();
}
- if (!this.supplementTs) {
+ if (!supplementTs) {
return timestamp;
} else {
String appId = getAppIdFromAttributes(attributes);
@@ -144,7 +66,7 @@ public class ColumnHelper {
}
}
- private String getAppIdFromAttributes(Attribute[] attributes) {
+ private static String getAppIdFromAttributes(Attribute[] attributes) {
if (attributes == null) {
return null;
}
@@ -159,10 +81,76 @@ public class ColumnHelper {
}
/**
- * @return the column family for this column implementation.
+ * Sends a Mutation to the table. The mutations will be buffered and sent over
+ * the wire as part of a batch.
+ *
+ * @param rowKey
+ * identifying the row to write. Nothing gets written when null.
+ * @param tableMutator
+ * used to modify the underlying HBase table
+ * @param column the column that is to be modified
+ * @param timestamp
+ * version timestamp. When null the current timestamp multiplied with
+ * TimestampGenerator.TS_MULTIPLIER and added with last 3 digits of
+ * app id will be used
+ * @param inputValue
+ * the value to write to the rowKey and column qualifier. Nothing
+ * gets written when null.
+ * @param attributes Attributes to be set for HBase Put.
+ * @throws IOException if any problem occurs during store operation(sending
+ * mutation to table).
*/
-  public ColumnFamily<T> getColumnFamily() {
- return columnFamily;
+  public static void store(byte[] rowKey, TypedBufferedMutator<?> tableMutator,
+      Column<?> column, Long timestamp,
+ Object inputValue, Attribute... attributes)
+ throws IOException {
+ store(rowKey, tableMutator, column.getColumnFamilyBytes(),
+ column.getColumnQualifierBytes(), timestamp,
+ column.supplementCellTimestamp(), inputValue,
+ column.getValueConverter(),
+ column.getCombinedAttrsWithAggr(attributes));
+ }
+
+ /**
+ * Sends a Mutation to the table. The mutations will be buffered and sent over
+ * the wire as part of a batch.
+ *
+ * @param rowKey
+ * identifying the row to write. Nothing gets written when null.
+ * @param tableMutator
+ * used to modify the underlying HBase table
+ * @param columnFamilyBytes
+ * @param columnQualifier
+ * column qualifier. Nothing gets written when null.
+ * @param timestamp
+ * version timestamp. When null the current timestamp multiplied with
+ * TimestampGenerator.TS_MULTIPLIER and added with last 3 digits of
+ * app id will be used
+ * @param inputValue
+ * the value to write to the rowKey and column qualifier. Nothing
+ * gets written when null.
+ * @param converter
+ * @param attributes Attributes to be set for HBase Put.
+ * @throws IOException if any problem occurs during store operation(sending
+ * mutation to table).
+ */
+  public static void store(byte[] rowKey, TypedBufferedMutator<?> tableMutator,
+ byte[] columnFamilyBytes, byte[] columnQualifier, Long timestamp,
+ boolean supplementTs, Object inputValue, ValueConverter converter,
+ Attribute... attributes) throws IOException {
+ if ((rowKey == null) || (columnQualifier == null) || (inputValue == null)) {
+ return;
+ }
+ Put p = new Put(rowKey);
+ timestamp = getPutTimestamp(timestamp, supplementTs, attributes);
+ p.addColumn(columnFamilyBytes, columnQualifier, timestamp,
+ converter.encodeValue(inputValue));
+ if ((attributes != null) && (attributes.length > 0)) {
+ for (Attribute attribute : attributes) {
+ p.setAttribute(attribute.getName(), attribute.getValue());
+ }
+ }
+ tableMutator.mutate(p);
}
/**
@@ -170,12 +158,15 @@ public class ColumnHelper {
* value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}.
*
* @param result from which to read the value. Cannot be null
+ * @param columnFamilyBytes
* @param columnQualifierBytes referring to the column to be read.
+ * @param converter
* @return latest version of the specified column of whichever object was
* written.
* @throws IOException if any problem occurs while reading result.
*/
- public Object readResult(Result result, byte[] columnQualifierBytes)
+ public static Object readResult(Result result, byte[] columnFamilyBytes,
+ byte[] columnQualifierBytes, ValueConverter converter)
throws IOException {
if (result == null || columnQualifierBytes == null) {
return null;
@@ -188,6 +179,87 @@ public class ColumnHelper {
return converter.decodeValue(value);
}
+ /**
+ * Get the latest version of this specified column. Note: this call clones the
+ * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}.
+ *
+ * @param result from which to read the value. Cannot be null
+ * @param column the column that the result can be parsed to
+ * @return latest version of the specified column of whichever object was
+ * written.
+ * @throws IOException if any problem occurs while reading result.
+ */
+ public static Object readResult(Result result, Column<?> column)
+ throws IOException {
+ return readResult(result, column.getColumnFamilyBytes(),
+ column.getColumnQualifierBytes(), column.getValueConverter());
+ }
+
+ /**
+ * Get the latest version of this specified column. Note: this call clones the
+ * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}.
+ *
+ * @param result Cannot be null
+ * @param columnPrefix column prefix to read from
+ * @param qualifier column qualifier. Nothing gets read when null.
+ * @return result object (can be cast to whatever object was written to) or
+ * null when specified column qualifier for this prefix doesn't exist
+ * in the result.
+ * @throws IOException if there is any exception encountered while reading
+ * result.
+ */
+ public static Object readResult(Result result, ColumnPrefix<?> columnPrefix,
+ String qualifier) throws IOException {
+ byte[] columnQualifier = ColumnHelper.getColumnQualifier(
+ columnPrefix.getColumnPrefixInBytes(), qualifier);
+
+ return readResult(
+ result, columnPrefix.getColumnFamilyBytes(),
+ columnQualifier, columnPrefix.getValueConverter());
+ }
+
+ /**
+ *
+ * @param <K> identifies the type of key converter.
+ * @param result from which to read columns.
+ * @param keyConverter used to convert column bytes to the appropriate key
+ * type
+ * @return the latest values of columns in the column family with this prefix
+ * (or all of them if the prefix value is null).
+ * @throws IOException if there is any exception encountered while reading
+ * results.
+ */
+ public static <K> Map<K, Object> readResults(Result result,
+ ColumnPrefix<?> columnPrefix, KeyConverter<K> keyConverter)
+ throws IOException {
+ return readResults(result,
+ columnPrefix.getColumnFamilyBytes(),
+ columnPrefix.getColumnPrefixInBytes(),
+ keyConverter, columnPrefix.getValueConverter());
+ }
+
+ /**
+ * @param result from which to reads data with timestamps.
+ * @param <K> identifies the type of key converter.
+ * @param <V> the type of the values. The values will be cast into that type.
+ * @param keyConverter used to convert column bytes to the appropriate key
+ * type.
+ * @return the cell values at each respective time in the form
+ * {@literal {idA={timestamp1->value1}, idA={timestamp2->value2},
+ * idB={timestamp3->value3}, idC={timestamp1->value4}}}
+ * @throws IOException if there is any exception encountered while reading
+ * result.
+ */
+ public static <K, V> NavigableMap<K, NavigableMap<Long, V>>
+ readResultsWithTimestamps(Result result, ColumnPrefix<?> columnPrefix,
+ KeyConverter<K> keyConverter) throws IOException {
+ return readResultsWithTimestamps(result,
+ columnPrefix.getColumnFamilyBytes(),
+ columnPrefix.getColumnPrefixInBytes(),
+ keyConverter, columnPrefix.getValueConverter(),
+ columnPrefix.supplementCellTimeStamp());
+ }
+
/**
* @param result from which to reads data with timestamps
* @param columnPrefixBytes optional prefix to limit columns. If null all
@@ -203,22 +275,24 @@ public class ColumnHelper {
* @throws IOException if any problem occurs while reading results.
*/
@SuppressWarnings("unchecked")
- public NavigableMap>
- readResultsWithTimestamps(Result result, byte[] columnPrefixBytes,
- KeyConverter keyConverter) throws IOException {
+ public static <K, V> NavigableMap<K, NavigableMap<Long, V>>
+ readResultsWithTimestamps(Result result, byte[] columnFamilyBytes,
+ byte[] columnPrefixBytes, KeyConverter<K> keyConverter,
+ ValueConverter valueConverter, boolean supplementTs)
+ throws IOException {
NavigableMap<K, NavigableMap<Long, V>> results = new TreeMap<>();
if (result != null) {
NavigableMap<
byte[], NavigableMap>> resultMap =
- result.getMap();
+ result.getMap();
NavigableMap<byte[], NavigableMap<Long, byte[]>> columnCellMap =
resultMap.get(columnFamilyBytes);
// could be that there is no such column family.
if (columnCellMap != null) {
- for (Entry> entry : columnCellMap
+ for (Map.Entry<byte[], NavigableMap<Long, byte[]>> entry : columnCellMap
.entrySet()) {
K converterColumnKey = null;
if (columnPrefixBytes == null) {
@@ -255,10 +329,10 @@ public class ColumnHelper {
new TreeMap();
NavigableMap cells = entry.getValue();
if (cells != null) {
- for (Entry cell : cells.entrySet()) {
+ for (Map.Entry<Long, byte[]> cell : cells.entrySet()) {
V value =
- (V) converter.decodeValue(cell.getValue());
- long ts = supplementTs ? TimestampGenerator.
+ (V) valueConverter.decodeValue(cell.getValue());
+ Long ts = supplementTs ? TimestampGenerator.
getTruncatedTimestamp(cell.getKey()) : cell.getKey();
cellResults.put(ts, value);
}
@@ -286,14 +360,15 @@ public class ColumnHelper {
* returning byte arrays of values that were not Strings.
* @throws IOException if any problem occurs while reading results.
*/
- public Map readResults(Result result,
- byte[] columnPrefixBytes, KeyConverter keyConverter)
+ public static <K> Map<K, Object> readResults(Result result,
+ byte[] columnFamilyBytes, byte[] columnPrefixBytes,
+ KeyConverter<K> keyConverter, ValueConverter valueConverter)
throws IOException {
Map results = new HashMap();
if (result != null) {
Map columns = result.getFamilyMap(columnFamilyBytes);
- for (Entry entry : columns.entrySet()) {
+ for (Map.Entry<byte[], byte[]> entry : columns.entrySet()) {
byte[] columnKey = entry.getKey();
if (columnKey != null && columnKey.length > 0) {
@@ -327,7 +402,7 @@ public class ColumnHelper {
// If the columnPrefix is null (we want all columns), or the actual
// prefix matches the given prefix we want this column
if (converterColumnKey != null) {
- Object value = converter.decodeValue(entry.getValue());
+ Object value = valueConverter.decodeValue(entry.getValue());
// we return the columnQualifier in parts since we don't know
// which part is of which data type.
results.put(converterColumnKey, value);
@@ -339,76 +414,74 @@ public class ColumnHelper {
}
/**
- * @param columnPrefixBytes The byte representation for the column prefix.
- * Should not contain {@link Separator#QUALIFIERS}.
- * @param qualifier for the remainder of the column.
- * {@link Separator#QUALIFIERS} is permissible in the qualifier
- * as it is joined only with the column prefix bytes.
- * @return fully sanitized column qualifier that is a combination of prefix
- * and qualifier. If prefix is null, the result is simply the encoded
- * qualifier without any separator.
+ * Sends a Mutation to the table. The mutations will be buffered and sent over
+ * the wire as part of a batch.
+ *
+ * @param rowKey identifying the row to write. Nothing gets written when null.
+ * @param tableMutator used to modify the underlying HBase table. Caller is
+ * responsible to pass a mutator for the table that actually has this
+ * column.
+ * @param qualifier column qualifier. Nothing gets written when null.
+ * @param timestamp version timestamp. When null the server timestamp will be
+ * used.
+ * @param attributes attributes for the mutation that are used by the
+ * coprocessor to set/read the cell tags.
+ * @param inputValue the value to write to the rowKey and column qualifier.
+ * Nothing gets written when null.
+ * @throws IOException if there is any exception encountered while doing
+ * store operation(sending mutation to the table).
*/
- public static byte[] getColumnQualifier(byte[] columnPrefixBytes,
- String qualifier) {
-
- // We don't want column names to have spaces / tabs.
- byte[] encodedQualifier =
- Separator.encode(qualifier, Separator.SPACE, Separator.TAB);
- if (columnPrefixBytes == null) {
- return encodedQualifier;
+ public static void store(byte[] rowKey, TypedBufferedMutator<?> tableMutator,
+ ColumnPrefix<?> columnPrefix, byte[] qualifier, Long timestamp,
+ Object inputValue, Attribute... attributes) throws IOException {
+ // Null check
+ if (qualifier == null) {
+ throw new IOException("Cannot store column with null qualifier in "
+ + tableMutator.getName().getNameAsString());
}
- // Convert qualifier to lower case, strip of separators and tag on column
- // prefix.
- byte[] columnQualifier =
- Separator.QUALIFIERS.join(columnPrefixBytes, encodedQualifier);
- return columnQualifier;
+ byte[] columnQualifier = columnPrefix.getColumnPrefixBytes(qualifier);
+ Attribute[] combinedAttributes =
+ columnPrefix.getCombinedAttrsWithAggr(attributes);
+
+ store(rowKey, tableMutator, columnPrefix.getColumnFamilyBytes(),
+ columnQualifier, timestamp, columnPrefix.supplementCellTimeStamp(),
+ inputValue, columnPrefix.getValueConverter(), combinedAttributes);
}
/**
- * @param columnPrefixBytes The byte representation for the column prefix.
- * Should not contain {@link Separator#QUALIFIERS}.
- * @param qualifier for the remainder of the column.
- * @return fully sanitized column qualifier that is a combination of prefix
- * and qualifier. If prefix is null, the result is simply the encoded
- * qualifier without any separator.
+ * Sends a Mutation to the table. The mutations will be buffered and sent over
+ * the wire as part of a batch.
+ *
+ * @param rowKey identifying the row to write. Nothing gets written when null.
+ * @param tableMutator used to modify the underlying HBase table. Caller is
+ * responsible to pass a mutator for the table that actually has this
+ * column.
+ * @param qualifier column qualifier. Nothing gets written when null.
+ * @param timestamp version timestamp. When null the server timestamp will be
+ * used.
+ * @param attributes attributes for the mutation that are used by the
+ * coprocessor to set/read the cell tags.
+ * @param inputValue the value to write to the rowKey and column qualifier.
+ * Nothing gets written when null.
+ * @throws IOException if there is any exception encountered while doing
+ * store operation(sending mutation to the table).
*/
- public static byte[] getColumnQualifier(byte[] columnPrefixBytes,
- long qualifier) {
-
- if (columnPrefixBytes == null) {
- return Bytes.toBytes(qualifier);
+ public static void store(byte[] rowKey, TypedBufferedMutator<?> tableMutator,
+ ColumnPrefix<?> columnPrefix, String qualifier, Long timestamp,
+ Object inputValue, Attribute... attributes) throws IOException {
+ // Null check
+ if (qualifier == null) {
+ throw new IOException("Cannot store column with null qualifier in "
+ + tableMutator.getName().getNameAsString());
}
- // Convert qualifier to lower case, strip of separators and tag on column
- // prefix.
- byte[] columnQualifier =
- Separator.QUALIFIERS.join(columnPrefixBytes, Bytes.toBytes(qualifier));
- return columnQualifier;
+ byte[] columnQualifier = columnPrefix.getColumnPrefixBytes(qualifier);
+ Attribute[] combinedAttributes =
+ columnPrefix.getCombinedAttrsWithAggr(attributes);
+
+ store(rowKey, tableMutator, columnPrefix.getColumnFamilyBytes(),
+ columnQualifier, timestamp, columnPrefix.supplementCellTimeStamp(),
+ inputValue, columnPrefix.getValueConverter(), combinedAttributes);
}
-
- public ValueConverter getValueConverter() {
- return converter;
- }
-
- /**
- * @param columnPrefixBytes The byte representation for the column prefix.
- * Should not contain {@link Separator#QUALIFIERS}.
- * @param qualifier the byte representation for the remainder of the column.
- * @return fully sanitized column qualifier that is a combination of prefix
- * and qualifier. If prefix is null, the result is simply the encoded
- * qualifier without any separator.
- */
- public static byte[] getColumnQualifier(byte[] columnPrefixBytes,
- byte[] qualifier) {
-
- if (columnPrefixBytes == null) {
- return qualifier;
- }
-
- byte[] columnQualifier =
- Separator.QUALIFIERS.join(columnPrefixBytes, qualifier);
- return columnQualifier;
- }
-
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
new file mode 100644
index 00000000000..36e86cd00a4
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.storage.common;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Arrays;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Query;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * A bunch of utility functions used in HBase TimelineService backend.
+ */
+public final class HBaseTimelineStorageUtils {
+
+ private static final Logger LOG =
+ LoggerFactory.getLogger(HBaseTimelineStorageUtils.class);
+ /** milliseconds in one day. */
+ public static final long MILLIS_ONE_DAY = 86400000L;
+
+ private HBaseTimelineStorageUtils() {
+ }
+
+ /**
+ * @param conf YARN configuration. Used to see if there is an explicit config
+ * pointing to the HBase config file to read. It should not be null
+ * or a NullPointerException will be thrown.
+ * @return a configuration with the HBase configuration from the classpath,
+ * optionally overwritten by the timeline service configuration URL if
+ * specified.
+ * @throws MalformedURLException if a timeline service HBase configuration URL
+ * is specified but is a malformed URL.
+ */
+ public static Configuration getTimelineServiceHBaseConf(Configuration conf)
+ throws MalformedURLException {
+ if (conf == null) {
+ throw new NullPointerException();
+ }
+
+ Configuration hbaseConf;
+ String timelineServiceHBaseConfFileURL =
+ conf.get(YarnConfiguration.TIMELINE_SERVICE_HBASE_CONFIGURATION_FILE);
+ if (timelineServiceHBaseConfFileURL != null
+ && timelineServiceHBaseConfFileURL.length() > 0) {
+ LOG.info("Using hbase configuration at " +
+ timelineServiceHBaseConfFileURL);
+ // create a clone so that we don't mess with our input one
+ hbaseConf = new Configuration(conf);
+ Configuration plainHBaseConf = new Configuration(false);
+ URL hbaseSiteXML = new URL(timelineServiceHBaseConfFileURL);
+ plainHBaseConf.addResource(hbaseSiteXML);
+ HBaseConfiguration.merge(hbaseConf, plainHBaseConf);
+ } else {
+ // default to what is on the classpath
+ hbaseConf = HBaseConfiguration.create(conf);
+ }
+ return hbaseConf;
+ }
+
+ /**
+ * Given a row key prefix stored in a byte array, return a byte array for its
+ * immediate next row key.
+ *
+ * @param rowKeyPrefix The provided row key prefix, represented in an array.
+ * @return the closest next row key of the provided row key.
+ */
+ public static byte[] calculateTheClosestNextRowKeyForPrefix(
+ byte[] rowKeyPrefix) {
+ // Essentially we are treating it like an 'unsigned very very long' and
+ // doing +1 manually.
+ // Search for the place where the trailing 0xFFs start
+ int offset = rowKeyPrefix.length;
+ while (offset > 0) {
+ if (rowKeyPrefix[offset - 1] != (byte) 0xFF) {
+ break;
+ }
+ offset--;
+ }
+
+ if (offset == 0) {
+ // We got an 0xFFFF... (only FFs) stopRow value which is
+ // the last possible prefix before the end of the table.
+ // So set it to stop at the 'end of the table'
+ return HConstants.EMPTY_END_ROW;
+ }
+
+ // Copy the right length of the original
+ byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset);
+ // And increment the last one
+ newStopRow[newStopRow.length - 1]++;
+ return newStopRow;
+ }
+
+ public static void setMetricsTimeRange(Query query, byte[] metricsCf,
+ long tsBegin, long tsEnd) {
+ if (tsBegin != 0 || tsEnd != Long.MAX_VALUE) {
+ query.setColumnFamilyTimeRange(metricsCf,
+ tsBegin, ((tsEnd == Long.MAX_VALUE) ? Long.MAX_VALUE : (tsEnd + 1)));
+ }
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java
similarity index 93%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java
index cf469a54282..29a07e4d95d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TypedBufferedMutator.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.client.Mutation;
*
* @param <T> The class referring to the table to be written to.
*/
-class BufferedMutatorDelegator implements TypedBufferedMutator {
+public class TypedBufferedMutator<T extends BaseTable<T>> {
private final BufferedMutator bufferedMutator;
@@ -38,7 +38,7 @@ class BufferedMutatorDelegator implements TypedBufferedMutator {
* @param bufferedMutator the mutator to be wrapped for delegation. Shall not
* be null.
*/
- public BufferedMutatorDelegator(BufferedMutator bufferedMutator) {
+ public TypedBufferedMutator(BufferedMutator bufferedMutator) {
this.bufferedMutator = bufferedMutator;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTableRW.java
similarity index 65%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTableRW.java
index 988bba2852f..111ae711b3f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTableRW.java
@@ -26,49 +26,15 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * The entity table as column families info, config and metrics. Info stores
- * information about a timeline entity object config stores configuration data
- * of a timeline entity object metrics stores the metrics of a timeline entity
- * object
- *
- * Example entity table record:
- *
- *
- * |-------------------------------------------------------------------------|
- * | Row | Column Family | Column Family| Column Family|
- * | key | info | metrics | config |
- * |-------------------------------------------------------------------------|
- * | userName! | id:entityId | metricId1: | configKey1: |
- * | clusterId! | | metricValue1 | configValue1 |
- * | flowName! | type:entityType | @timestamp1 | |
- * | flowRunId! | | | configKey2: |
- * | AppId! | created_time: | metricId1: | configValue2 |
- * | entityType!| 1392993084018 | metricValue2 | |
- * | idPrefix! | | @timestamp2 | |
- * | entityId | i!infoKey: | | |
- * | | infoValue | metricId1: | |
- * | | | metricValue1 | |
- * | | r!relatesToKey: | @timestamp2 | |
- * | | id3=id4=id5 | | |
- * | | | | |
- * | | s!isRelatedToKey | | |
- * | | id7=id9=id6 | | |
- * | | | | |
- * | | e!eventId=timestamp=infoKey: | | |
- * | | eventInfoValue | | |
- * | | | | |
- * | | flowVersion: | | |
- * | | versionValue | | |
- * |-------------------------------------------------------------------------|
- *
+ * Create, read and write to the Entity Table.
*/
-public class EntityTable extends BaseTable {
+public class EntityTableRW extends BaseTableRW<EntityTable> {
/** entity prefix. */
private static final String PREFIX =
YarnConfiguration.TIMELINE_SERVICE_PREFIX + "entity";
@@ -100,9 +66,9 @@ public class EntityTable extends BaseTable {
private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000;
private static final Logger LOG =
- LoggerFactory.getLogger(EntityTable.class);
+ LoggerFactory.getLogger(EntityTableRW.class);
- public EntityTable() {
+ public EntityTableRW() {
super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
}
@@ -110,8 +76,8 @@ public class EntityTable extends BaseTable {
* (non-Javadoc)
*
* @see
- * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
- * (org.apache.hadoop.hbase.client.Admin,
+ * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+ * createTable(org.apache.hadoop.hbase.client.Admin,
* org.apache.hadoop.conf.Configuration)
*/
public void createTable(Admin admin, Configuration hbaseConf)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTableRW.java
similarity index 76%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTableRW.java
index e646eb26a56..5b9fe13a883 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTableRW.java
@@ -26,32 +26,14 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * The flow activity table has column family info
- * Stores the daily activity record for flows
- * Useful as a quick lookup of what flows were
- * running on a given day
- *
- * Example flow activity table record:
- *
- *
- * |-------------------------------------------|
- * | Row key | Column Family |
- * | | info |
- * |-------------------------------------------|
- * | clusterId! | r!runid1:version1 |
- * | inv Top of | |
- * | Day! | r!runid2:version7 |
- * | userName! | |
- * | flowName | |
- * |-------------------------------------------|
- *
+ * Create, read and write to the FlowActivity Table.
*/
-public class FlowActivityTable extends BaseTable {
+public class FlowActivityTableRW extends BaseTableRW<FlowActivityTable> {
/** flow activity table prefix. */
private static final String PREFIX =
YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".flowactivity";
@@ -64,12 +46,12 @@ public class FlowActivityTable extends BaseTable {
"timelineservice.flowactivity";
private static final Logger LOG =
- LoggerFactory.getLogger(FlowActivityTable.class);
+ LoggerFactory.getLogger(FlowActivityTableRW.class);
/** default max number of versions. */
public static final int DEFAULT_METRICS_MAX_VERSIONS = Integer.MAX_VALUE;
- public FlowActivityTable() {
+ public FlowActivityTableRW() {
super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
}
@@ -77,8 +59,8 @@ public class FlowActivityTable extends BaseTable {
* (non-Javadoc)
*
* @see
- * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
- * (org.apache.hadoop.hbase.client.Admin,
+ * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+ * createTable(org.apache.hadoop.hbase.client.Admin,
* org.apache.hadoop.conf.Configuration)
*/
public void createTable(Admin admin, Configuration hbaseConf)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTableRW.java
similarity index 58%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTableRW.java
index a1d32ee78cd..61c073432a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTableRW.java
@@ -26,66 +26,16 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
/**
- * The flow run table has column family info
- * Stores per flow run information
- * aggregated across applications.
- *
- * Metrics are also stored in the info column family.
- *
- * Example flow run table record:
- *
- *
- * flow_run table
- * |-------------------------------------------|
- * | Row key | Column Family |
- * | | info |
- * |-------------------------------------------|
- * | clusterId! | flow_version:version7 |
- * | userName! | |
- * | flowName! | running_apps:1 |
- * | flowRunId | |
- * | | min_start_time:1392995080000 |
- * | | #0:"" |
- * | | |
- * | | min_start_time:1392995081012 |
- * | | #0:appId2 |
- * | | |
- * | | min_start_time:1392993083210 |
- * | | #0:appId3 |
- * | | |
- * | | |
- * | | max_end_time:1392993084018 |
- * | | #0:"" |
- * | | |
- * | | |
- * | | m!mapInputRecords:127 |
- * | | #0:"" |
- * | | |
- * | | m!mapInputRecords:31 |
- * | | #2:appId2 |
- * | | |
- * | | m!mapInputRecords:37 |
- * | | #1:appId3 |
- * | | |
- * | | |
- * | | m!mapOutputRecords:181 |
- * | | #0:"" |
- * | | |
- * | | m!mapOutputRecords:37 |
- * | | #1:appId3 |
- * | | |
- * | | |
- * |-------------------------------------------|
- *
+ * Create, read and write to the FlowRun table.
*/
-public class FlowRunTable extends BaseTable {
+public class FlowRunTableRW extends BaseTableRW {
/** entity prefix. */
private static final String PREFIX =
YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".flowrun";
@@ -97,12 +47,12 @@ public class FlowRunTable extends BaseTable {
public static final String DEFAULT_TABLE_NAME = "timelineservice.flowrun";
private static final Logger LOG =
- LoggerFactory.getLogger(FlowRunTable.class);
+ LoggerFactory.getLogger(FlowRunTableRW.class);
/** default max number of versions. */
public static final int DEFAULT_METRICS_MAX_VERSIONS = Integer.MAX_VALUE;
- public FlowRunTable() {
+ public FlowRunTableRW() {
super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
}
@@ -110,8 +60,8 @@ public class FlowRunTable extends BaseTable {
* (non-Javadoc)
*
* @see
- * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
- * (org.apache.hadoop.hbase.client.Admin,
+ * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+ * createTable(org.apache.hadoop.hbase.client.Admin,
* org.apache.hadoop.conf.Configuration)
*/
public void createTable(Admin admin, Configuration hbaseConf)
@@ -142,7 +92,8 @@ public class FlowRunTable extends BaseTable {
Path coprocessorJarPath = new Path(coprocessorJarPathStr);
LOG.info("CoprocessorJarPath=" + coprocessorJarPath.toString());
flowRunTableDescp.addCoprocessor(
- FlowRunCoprocessor.class.getCanonicalName(), coprocessorJarPath,
+ "org.apache.hadoop.yarn.server.timelineservice.storage." +
+ "flow.FlowRunCoprocessor", coprocessorJarPath,
Coprocessor.PRIORITY_USER, null);
admin.createTable(flowRunTableDescp);
LOG.info("Status of table creation for " + table.getNameAsString() + "="
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java
similarity index 91%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java
index 5bacf66fb45..0956f1e5676 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/AbstractTimelineStorageReader.java
@@ -26,7 +26,8 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.webapp.NotFoundException;
/**
@@ -39,7 +40,7 @@ public abstract class AbstractTimelineStorageReader {
/**
* Used to look up the flow context.
*/
- private final AppToFlowTable appToFlowTable = new AppToFlowTable();
+ private final AppToFlowTableRW appToFlowTable = new AppToFlowTableRW();
public AbstractTimelineStorageReader(TimelineReaderContext ctxt) {
context = ctxt;
@@ -66,12 +67,12 @@ public abstract class AbstractTimelineStorageReader {
Get get = new Get(rowKey);
Result result = appToFlowTable.getResult(hbaseConf, conn, get);
if (result != null && !result.isEmpty()) {
- Object flowName =
- AppToFlowColumnPrefix.FLOW_NAME.readResult(result, clusterId);
- Object flowRunId =
- AppToFlowColumnPrefix.FLOW_RUN_ID.readResult(result, clusterId);
- Object userId =
- AppToFlowColumnPrefix.USER_ID.readResult(result, clusterId);
+ Object flowName = ColumnRWHelper.readResult(
+ result, AppToFlowColumnPrefix.FLOW_NAME, clusterId);
+ Object flowRunId = ColumnRWHelper.readResult(
+ result, AppToFlowColumnPrefix.FLOW_RUN_ID, clusterId);
+ Object userId = ColumnRWHelper.readResult(
+ result, AppToFlowColumnPrefix.USER_ID, clusterId);
if (flowName == null || userId == null || flowRunId == null) {
throw new NotFoundException(
"Unable to find the context flow name, and flow run id, "
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
similarity index 97%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
index 0edd6a52680..7440316c9e2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
@@ -49,8 +49,9 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.application.Applica
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
@@ -63,23 +64,23 @@ import com.google.common.base.Preconditions;
* application table.
*/
class ApplicationEntityReader extends GenericEntityReader {
- private static final ApplicationTable APPLICATION_TABLE =
- new ApplicationTable();
+ private static final ApplicationTableRW APPLICATION_TABLE =
+ new ApplicationTableRW();
- public ApplicationEntityReader(TimelineReaderContext ctxt,
+ ApplicationEntityReader(TimelineReaderContext ctxt,
TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) {
super(ctxt, entityFilters, toRetrieve);
}
- public ApplicationEntityReader(TimelineReaderContext ctxt,
+ ApplicationEntityReader(TimelineReaderContext ctxt,
TimelineDataToRetrieve toRetrieve) {
super(ctxt, toRetrieve);
}
/**
- * Uses the {@link ApplicationTable}.
+ * Uses the {@link ApplicationTableRW}.
*/
- protected BaseTable> getTable() {
+ protected BaseTableRW> getTable() {
return APPLICATION_TABLE;
}
@@ -430,12 +431,14 @@ class ApplicationEntityReader extends GenericEntityReader {
}
TimelineEntity entity = new TimelineEntity();
entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
- String entityId = ApplicationColumn.ID.readResult(result).toString();
+ String entityId =
+ ColumnRWHelper.readResult(result, ApplicationColumn.ID).toString();
entity.setId(entityId);
TimelineEntityFilters filters = getFilters();
// fetch created time
- Long createdTime = (Long) ApplicationColumn.CREATED_TIME.readResult(result);
+ Long createdTime = (Long) ColumnRWHelper.readResult(result,
+ ApplicationColumn.CREATED_TIME);
entity.setCreatedTime(createdTime);
EnumSet fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
similarity index 98%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
index 0764775b968..5d5a653d613 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/EntityTypeReader.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelin
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,7 +51,7 @@ public final class EntityTypeReader extends AbstractTimelineStorageReader {
private static final Logger LOG =
LoggerFactory.getLogger(EntityTypeReader.class);
- private static final EntityTable ENTITY_TABLE = new EntityTable();
+ private static final EntityTableRW ENTITY_TABLE = new EntityTableRW();
public EntityTypeReader(TimelineReaderContext context) {
super(context);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
similarity index 92%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
index a1cdb29b124..d0a0f3bb46e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
@@ -35,13 +35,14 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrie
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderUtils;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongKeyConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTableRW;
import org.apache.hadoop.yarn.webapp.BadRequestException;
import com.google.common.base.Preconditions;
@@ -51,8 +52,8 @@ import com.google.common.base.Preconditions;
* flow activity table.
*/
class FlowActivityEntityReader extends TimelineEntityReader {
- private static final FlowActivityTable FLOW_ACTIVITY_TABLE =
- new FlowActivityTable();
+ private static final FlowActivityTableRW FLOW_ACTIVITY_TABLE =
+ new FlowActivityTableRW();
/**
* Used to convert Long key components to and from storage format.
@@ -60,21 +61,21 @@ class FlowActivityEntityReader extends TimelineEntityReader {
private final KeyConverter longKeyConverter = new LongKeyConverter();
- public FlowActivityEntityReader(TimelineReaderContext ctxt,
+ FlowActivityEntityReader(TimelineReaderContext ctxt,
TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) {
super(ctxt, entityFilters, toRetrieve);
}
- public FlowActivityEntityReader(TimelineReaderContext ctxt,
+ FlowActivityEntityReader(TimelineReaderContext ctxt,
TimelineDataToRetrieve toRetrieve) {
super(ctxt, toRetrieve);
}
/**
- * Uses the {@link FlowActivityTable}.
+ * Uses the {@link FlowActivityTableRW}.
*/
@Override
- protected BaseTable> getTable() {
+ protected BaseTableRW> getTable() {
return FLOW_ACTIVITY_TABLE;
}
@@ -164,8 +165,8 @@ class FlowActivityEntityReader extends TimelineEntityReader {
flowActivity.setId(flowActivity.getId());
// get the list of run ids along with the version that are associated with
// this flow on this day
- Map runIdsMap =
- FlowActivityColumnPrefix.RUN_ID.readResults(result, longKeyConverter);
+ Map runIdsMap = ColumnRWHelper.readResults(result,
+ FlowActivityColumnPrefix.RUN_ID, longKeyConverter);
for (Map.Entry e : runIdsMap.entrySet()) {
Long runId = e.getKey();
String version = (String)e.getValue();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
similarity index 94%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
index af043b3bc7a..33a2cf67a27 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
@@ -43,7 +43,8 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderUtils;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumn;
@@ -51,7 +52,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnF
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW;
import org.apache.hadoop.yarn.webapp.BadRequestException;
import com.google.common.base.Preconditions;
@@ -61,23 +62,23 @@ import com.google.common.base.Preconditions;
* table.
*/
class FlowRunEntityReader extends TimelineEntityReader {
- private static final FlowRunTable FLOW_RUN_TABLE = new FlowRunTable();
+ private static final FlowRunTableRW FLOW_RUN_TABLE = new FlowRunTableRW();
- public FlowRunEntityReader(TimelineReaderContext ctxt,
+ FlowRunEntityReader(TimelineReaderContext ctxt,
TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) {
super(ctxt, entityFilters, toRetrieve);
}
- public FlowRunEntityReader(TimelineReaderContext ctxt,
+ FlowRunEntityReader(TimelineReaderContext ctxt,
TimelineDataToRetrieve toRetrieve) {
super(ctxt, toRetrieve);
}
/**
- * Uses the {@link FlowRunTable}.
+ * Uses the {@link FlowRunTableRW}.
*/
@Override
- protected BaseTable> getTable() {
+ protected BaseTableRW> getTable() {
return FLOW_RUN_TABLE;
}
@@ -261,19 +262,22 @@ class FlowRunEntityReader extends TimelineEntityReader {
flowRun.setName(rowKey.getFlowName());
// read the start time
- Long startTime = (Long) FlowRunColumn.MIN_START_TIME.readResult(result);
+ Long startTime = (Long) ColumnRWHelper.readResult(result,
+ FlowRunColumn.MIN_START_TIME);
if (startTime != null) {
flowRun.setStartTime(startTime.longValue());
}
// read the end time if available
- Long endTime = (Long) FlowRunColumn.MAX_END_TIME.readResult(result);
+ Long endTime = (Long) ColumnRWHelper.readResult(result,
+ FlowRunColumn.MAX_END_TIME);
if (endTime != null) {
flowRun.setMaxEndTime(endTime.longValue());
}
// read the flow version
- String version = (String) FlowRunColumn.FLOW_VERSION.readResult(result);
+ String version = (String) ColumnRWHelper.readResult(result,
+ FlowRunColumn.FLOW_VERSION);
if (version != null) {
flowRun.setVersion(version);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
similarity index 97%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
index 3a444455bd9..02eca84f1f2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
@@ -46,7 +46,9 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilte
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix;
@@ -57,7 +59,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumn
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
import org.apache.hadoop.yarn.webapp.BadRequestException;
import com.google.common.base.Preconditions;
@@ -67,7 +69,7 @@ import com.google.common.base.Preconditions;
* table.
*/
class GenericEntityReader extends TimelineEntityReader {
- private static final EntityTable ENTITY_TABLE = new EntityTable();
+ private static final EntityTableRW ENTITY_TABLE = new EntityTableRW();
/**
* Used to convert strings key components to and from storage format.
@@ -75,20 +77,20 @@ class GenericEntityReader extends TimelineEntityReader {
private final KeyConverter stringKeyConverter =
new StringKeyConverter();
- public GenericEntityReader(TimelineReaderContext ctxt,
+ GenericEntityReader(TimelineReaderContext ctxt,
TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) {
super(ctxt, entityFilters, toRetrieve);
}
- public GenericEntityReader(TimelineReaderContext ctxt,
+ GenericEntityReader(TimelineReaderContext ctxt,
TimelineDataToRetrieve toRetrieve) {
super(ctxt, toRetrieve);
}
/**
- * Uses the {@link EntityTable}.
+ * Uses the {@link EntityTableRW}.
*/
- protected BaseTable> getTable() {
+ protected BaseTableRW> getTable() {
return ENTITY_TABLE;
}
@@ -543,7 +545,8 @@ class GenericEntityReader extends TimelineEntityReader {
TimelineEntityFilters filters = getFilters();
// fetch created time
- Long createdTime = (Long) EntityColumn.CREATED_TIME.readResult(result);
+ Long createdTime = (Long) ColumnRWHelper.readResult(result,
+ EntityColumn.CREATED_TIME);
entity.setCreatedTime(createdTime);
EnumSet fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
@@ -635,11 +638,12 @@ class GenericEntityReader extends TimelineEntityReader {
* @param isConfig if true, means we are reading configs, otherwise info.
* @throws IOException if any problem is encountered while reading result.
*/
- protected void readKeyValuePairs(TimelineEntity entity, Result result,
+ protected > void readKeyValuePairs(
+ TimelineEntity entity, Result result,
ColumnPrefix prefix, boolean isConfig) throws IOException {
// info and configuration are of type Map
Map columns =
- prefix.readResults(result, stringKeyConverter);
+ ColumnRWHelper.readResults(result, prefix, stringKeyConverter);
if (isConfig) {
for (Map.Entry column : columns.entrySet()) {
entity.addConfig(column.getKey(), column.getValue().toString());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
similarity index 98%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
index e780dccd3a8..faed34857d7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
@@ -42,7 +42,8 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderUtils;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
@@ -51,14 +52,14 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubA
import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW;
import org.apache.hadoop.yarn.webapp.BadRequestException;
import com.google.common.base.Preconditions;
class SubApplicationEntityReader extends GenericEntityReader {
- private static final SubApplicationTable SUB_APPLICATION_TABLE =
- new SubApplicationTable();
+ private static final SubApplicationTableRW SUB_APPLICATION_TABLE =
+ new SubApplicationTableRW();
SubApplicationEntityReader(TimelineReaderContext ctxt,
TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) {
@@ -71,9 +72,9 @@ class SubApplicationEntityReader extends GenericEntityReader {
}
/**
- * Uses the {@link SubApplicationTable}.
+ * Uses the {@link SubApplicationTableRW}.
*/
- protected BaseTable> getTable() {
+ protected BaseTableRW> getTable() {
return SUB_APPLICATION_TABLE;
}
@@ -403,8 +404,8 @@ class SubApplicationEntityReader extends GenericEntityReader {
TimelineEntityFilters filters = getFilters();
// fetch created time
- Long createdTime =
- (Long) SubApplicationColumn.CREATED_TIME.readResult(result);
+ Long createdTime = (Long) ColumnRWHelper.readResult(result,
+ SubApplicationColumn.CREATED_TIME);
entity.setCreatedTime(createdTime);
EnumSet fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
similarity index 93%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
index 07e84234254..3168163ed96 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
@@ -44,7 +44,9 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContex
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
@@ -72,7 +74,7 @@ public abstract class TimelineEntityReader extends
/**
* Main table the entity reader uses.
*/
- private BaseTable> table;
+ private BaseTableRW> table;
/**
* Used to convert strings key components to and from storage format.
@@ -261,7 +263,7 @@ public abstract class TimelineEntityReader extends
*
* @return A reference to the table.
*/
- protected BaseTable> getTable() {
+ protected BaseTableRW> getTable() {
return table;
}
@@ -314,8 +316,8 @@ public abstract class TimelineEntityReader extends
protected void readMetrics(TimelineEntity entity, Result result,
ColumnPrefix> columnPrefix) throws IOException {
NavigableMap> metricsResult =
- columnPrefix.readResultsWithTimestamps(
- result, stringKeyConverter);
+ ColumnRWHelper.readResultsWithTimestamps(
+ result, columnPrefix, stringKeyConverter);
for (Map.Entry> metricResult:
metricsResult.entrySet()) {
TimelineMetric metric = new TimelineMetric();
@@ -340,7 +342,7 @@ public abstract class TimelineEntityReader extends
return singleEntityRead;
}
- protected void setTable(BaseTable> baseTable) {
+ protected void setTable(BaseTableRW> baseTable) {
this.table = baseTable;
}
@@ -367,8 +369,9 @@ public abstract class TimelineEntityReader extends
* @param columns set of column qualifiers.
* @return filter list.
*/
- protected FilterList createFiltersFromColumnQualifiers(
- ColumnPrefix colPrefix, Set columns) {
+ protected > FilterList
+ createFiltersFromColumnQualifiers(
+ ColumnPrefix colPrefix, Set columns) {
FilterList list = new FilterList(Operator.MUST_PASS_ONE);
for (String column : columns) {
// For columns which have compound column qualifiers (eg. events), we need
@@ -381,8 +384,8 @@ public abstract class TimelineEntityReader extends
return list;
}
- protected byte[] createColQualifierPrefix(ColumnPrefix colPrefix,
- String column) {
+ protected > byte[] createColQualifierPrefix(
+ ColumnPrefix colPrefix, String column) {
if (colPrefix == ApplicationColumnPrefix.EVENT
|| colPrefix == EntityColumnPrefix.EVENT) {
return new EventColumnName(column, null, null).getColumnQualifier();
@@ -402,11 +405,12 @@ public abstract class TimelineEntityReader extends
* isRelatedTo, otherwise its added to relatesTo.
* @throws IOException if any problem is encountered while reading result.
*/
- protected void readRelationship(TimelineEntity entity, Result result,
+ protected > void readRelationship(
+ TimelineEntity entity, Result result,
ColumnPrefix prefix, boolean isRelatedTo) throws IOException {
// isRelatedTo and relatesTo are of type Map>
- Map columns =
- prefix.readResults(result, stringKeyConverter);
+ Map columns = ColumnRWHelper.readResults(
+ result, prefix, stringKeyConverter);
for (Map.Entry column : columns.entrySet()) {
for (String id : Separator.VALUES.splitEncoded(column.getValue()
.toString())) {
@@ -430,11 +434,12 @@ public abstract class TimelineEntityReader extends
* @param prefix column prefix.
* @throws IOException if any problem is encountered while reading result.
*/
- protected static void readEvents(TimelineEntity entity, Result result,
+ protected static > void readEvents(
+ TimelineEntity entity, Result result,
ColumnPrefix prefix) throws IOException {
Map eventsMap = new HashMap<>();
- Map eventsResult =
- prefix.readResults(result, new EventColumnNameConverter());
+ Map eventsResult = ColumnRWHelper.readResults(
+ result, prefix, new EventColumnNameConverter());
for (Map.Entry
eventResult : eventsResult.entrySet()) {
EventColumnName eventColumnName = eventResult.getKey();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTableRW.java
similarity index 64%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTableRW.java
index 785a243d93d..256b24b72e7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationTableRW.java
@@ -26,52 +26,15 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * The sub application table has column families:
- * info, config and metrics.
- * Info stores information about a timeline entity object
- * config stores configuration data of a timeline entity object
- * metrics stores the metrics of a timeline entity object
- *
- * Example sub application table record:
- *
- *
- * |-------------------------------------------------------------------------|
- * | Row | Column Family | Column Family| Column Family|
- * | key | info | metrics | config |
- * |-------------------------------------------------------------------------|
- * | subAppUserId! | id:entityId | metricId1: | configKey1: |
- * | clusterId! | type:entityType | metricValue1 | configValue1 |
- * | entityType! | | @timestamp1 | |
- * | idPrefix!| | | | configKey2: |
- * | entityId! | created_time: | metricId1: | configValue2 |
- * | userId | 1392993084018 | metricValue2 | |
- * | | | @timestamp2 | |
- * | | i!infoKey: | | |
- * | | infoValue | metricId1: | |
- * | | | metricValue1 | |
- * | | | @timestamp2 | |
- * | | e!eventId=timestamp= | | |
- * | | infoKey: | | |
- * | | eventInfoValue | | |
- * | | | | |
- * | | r!relatesToKey: | | |
- * | | id3=id4=id5 | | |
- * | | | | |
- * | | s!isRelatedToKey | | |
- * | | id7=id9=id6 | | |
- * | | | | |
- * | | flowVersion: | | |
- * | | versionValue | | |
- * |-------------------------------------------------------------------------|
- *
+ * Create, read and write to the SubApplication table.
*/
-public class SubApplicationTable extends BaseTable {
+public class SubApplicationTableRW extends BaseTableRW {
/** sub app prefix. */
private static final String PREFIX =
YarnConfiguration.TIMELINE_SERVICE_PREFIX + "subapplication";
@@ -104,9 +67,9 @@ public class SubApplicationTable extends BaseTable {
private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000;
private static final Logger LOG = LoggerFactory.getLogger(
- SubApplicationTable.class);
+ SubApplicationTableRW.class);
- public SubApplicationTable() {
+ public SubApplicationTableRW() {
super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
}
@@ -114,8 +77,8 @@ public class SubApplicationTable extends BaseTable {
* (non-Javadoc)
*
* @see
- * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable
- * (org.apache.hadoop.hbase.client.Admin,
+ * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
+ * createTable(org.apache.hadoop.hbase.client.Admin,
* org.apache.hadoop.conf.Configuration)
*/
public void createTable(Admin admin, Configuration hbaseConf)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/package-info.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java
similarity index 100%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/pom.xml
new file mode 100644
index 00000000000..21cf616ec5c
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/pom.xml
@@ -0,0 +1,131 @@
+
+
+
+
+ hadoop-yarn-server-timelineservice-hbase
+ org.apache.hadoop
+ 2.10.0-SNAPSHOT
+
+ 4.0.0
+
+ hadoop-yarn-server-timelineservice-hbase-common
+ Apache Hadoop YARN TimelineService HBase Common
+ 2.10.0-SNAPSHOT
+
+
+ ${project.parent.parent.parent.basedir}
+
+
+
+
+