diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
index 29ba1845db9..7440316c9e2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
@@ -259,8 +258,7 @@ class ApplicationEntityReader extends GenericEntityReader {
    * @throws IOException if any problem occurs while updating filter list.
    */
   private void updateFilterForConfsAndMetricsToRetrieve(
-      FilterList listBasedOnFields, Set<String> cfsInFields)
-      throws IOException {
+      FilterList listBasedOnFields) throws IOException {
     TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
     // Please note that if confsToRetrieve is specified, we would have added
     // CONFS to fields to retrieve in augmentParams() even if not specified.
@@ -270,8 +268,6 @@ class ApplicationEntityReader extends GenericEntityReader {
           createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getConfsToRetrieve(), ApplicationColumnFamily.CONFIGS,
               ApplicationColumnPrefix.CONFIG));
-      cfsInFields.add(
-          Bytes.toString(ApplicationColumnFamily.CONFIGS.getBytes()));
     }
 
     // Please note that if metricsToRetrieve is specified, we would have added
@@ -282,14 +278,11 @@ class ApplicationEntityReader extends GenericEntityReader {
           createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getMetricsToRetrieve(), ApplicationColumnFamily.METRICS,
               ApplicationColumnPrefix.METRIC));
-      cfsInFields.add(
-          Bytes.toString(ApplicationColumnFamily.METRICS.getBytes()));
     }
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
-      throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     if (!needCreateFilterListBasedOnFields()) {
       // Fetch all the columns. No need of a filter.
       return null;
@@ -310,9 +303,8 @@ class ApplicationEntityReader extends GenericEntityReader {
       excludeFieldsFromInfoColFamily(infoColFamilyList);
     }
     listBasedOnFields.addFilter(infoColFamilyList);
-    cfsInFields.add(Bytes.toString(ApplicationColumnFamily.INFO.getBytes()));
 
-    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields, cfsInFields);
+    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
     return listBasedOnFields;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
index 7b7eef57015..d0a0f3bb46e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
 
 import java.io.IOException;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
@@ -98,8 +97,7 @@ class FlowActivityEntityReader extends TimelineEntityReader {
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(
-      Set<String> cfsInFields) {
+  protected FilterList constructFilterListBasedOnFields() {
     return null;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
index 80d3e9b3363..33a2cf67a27 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
 
 import java.io.IOException;
 import java.util.EnumSet;
-import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
@@ -35,7 +34,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
@@ -154,8 +152,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(
-      Set<String> cfsInFields) throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     FilterList list = new FilterList(Operator.MUST_PASS_ONE);
     // By default fetch everything in INFO column family.
     FamilyFilter infoColumnFamily =
@@ -169,7 +166,6 @@ class FlowRunEntityReader extends TimelineEntityReader {
         && !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
       FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
       infoColFamilyList.addFilter(infoColumnFamily);
-      cfsInFields.add(Bytes.toString(FlowRunColumnFamily.INFO.getBytes()));
       infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(FlowRunColumnPrefix.METRIC
               .getColumnPrefixBytes(""))));
@@ -186,7 +182,6 @@ class FlowRunEntityReader extends TimelineEntityReader {
         && !metricsToRetrieve.getFilterList().isEmpty()) {
       FilterList infoColFamilyList = new FilterList();
       infoColFamilyList.addFilter(infoColumnFamily);
-      cfsInFields.add(Bytes.toString(FlowRunColumnFamily.INFO.getBytes()));
       FilterList columnsList = updateFixedColumns();
       columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(
           FlowRunColumnPrefix.METRIC, metricsToRetrieve));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
index 6e62f20aa7d..02eca84f1f2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
@@ -350,8 +349,7 @@ class GenericEntityReader extends TimelineEntityReader {
    * @throws IOException if any problem occurs while updating filter list.
    */
   private void updateFilterForConfsAndMetricsToRetrieve(
-      FilterList listBasedOnFields, Set<String> cfsInFields)
-      throws IOException {
+      FilterList listBasedOnFields) throws IOException {
     TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
     // Please note that if confsToRetrieve is specified, we would have added
     // CONFS to fields to retrieve in augmentParams() even if not specified.
@@ -361,7 +359,6 @@ class GenericEntityReader extends TimelineEntityReader {
           .createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getConfsToRetrieve(), EntityColumnFamily.CONFIGS,
               EntityColumnPrefix.CONFIG));
-      cfsInFields.add(Bytes.toString(EntityColumnFamily.CONFIGS.getBytes()));
     }
 
     // Please note that if metricsToRetrieve is specified, we would have added
@@ -372,13 +369,11 @@ class GenericEntityReader extends TimelineEntityReader {
           .createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getMetricsToRetrieve(), EntityColumnFamily.METRICS,
               EntityColumnPrefix.METRIC));
-      cfsInFields.add(Bytes.toString(EntityColumnFamily.METRICS.getBytes()));
     }
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
-      throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     if (!needCreateFilterListBasedOnFields()) {
       // Fetch all the columns. No need of a filter.
       return null;
@@ -399,8 +394,7 @@ class GenericEntityReader extends TimelineEntityReader {
       excludeFieldsFromInfoColFamily(infoColFamilyList);
     }
     listBasedOnFields.addFilter(infoColFamilyList);
-    cfsInFields.add(Bytes.toString(EntityColumnFamily.INFO.getBytes()));
-    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields, cfsInFields);
+    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
     return listBasedOnFields;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
index 6a91c7b46d0..faed34857d7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
@@ -248,8 +247,7 @@ class SubApplicationEntityReader extends GenericEntityReader {
    * @throws IOException if any problem occurs while updating filter list.
    */
   private void updateFilterForConfsAndMetricsToRetrieve(
-      FilterList listBasedOnFields, Set<String> cfsInFields)
-      throws IOException {
+      FilterList listBasedOnFields) throws IOException {
     TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
     // Please note that if confsToRetrieve is specified, we would have added
     // CONFS to fields to retrieve in augmentParams() even if not specified.
@@ -260,8 +258,6 @@ class SubApplicationEntityReader extends GenericEntityReader {
           dataToRetrieve.getConfsToRetrieve(),
           SubApplicationColumnFamily.CONFIGS,
           SubApplicationColumnPrefix.CONFIG));
-      cfsInFields.add(
-          Bytes.toString(SubApplicationColumnFamily.CONFIGS.getBytes()));
     }
 
     // Please note that if metricsToRetrieve is specified, we would have added
@@ -273,14 +269,11 @@ class SubApplicationEntityReader extends GenericEntityReader {
           dataToRetrieve.getMetricsToRetrieve(),
           SubApplicationColumnFamily.METRICS,
           SubApplicationColumnPrefix.METRIC));
-      cfsInFields.add(
-          Bytes.toString(SubApplicationColumnFamily.METRICS.getBytes()));
     }
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
-      throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     if (!needCreateFilterListBasedOnFields()) {
       // Fetch all the columns. No need of a filter.
       return null;
@@ -300,9 +293,7 @@ class SubApplicationEntityReader extends GenericEntityReader {
       excludeFieldsFromInfoColFamily(infoColFamilyList);
     }
     listBasedOnFields.addFilter(infoColFamilyList);
-    cfsInFields.add(
-        Bytes.toString(SubApplicationColumnFamily.INFO.getBytes()));
-    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields, cfsInFields);
+    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
     return listBasedOnFields;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
index 43ba2afe739..3168163ed96 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
@@ -30,16 +30,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.FamilyFilter;
-import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
@@ -127,12 +122,11 @@ public abstract class TimelineEntityReader extends
    * results fetched from HBase back-end storage. This is called only for
    * multiple entity reads.
    *
-   * @param cfsInFields column families in the fields
    * @return a {@link FilterList} object.
    * @throws IOException if any problem occurs while creating filter list.
    */
-  protected abstract FilterList constructFilterListBasedOnFields(
-      Set<String> cfsInFields) throws IOException;
+  protected abstract FilterList constructFilterListBasedOnFields()
+      throws IOException;
 
   /**
    * Creates a {@link FilterList} based on info, config and metric filters. This
@@ -157,9 +151,7 @@ public abstract class TimelineEntityReader extends
     FilterList listBasedOnFilters = constructFilterListBasedOnFilters();
     boolean hasListBasedOnFilters = listBasedOnFilters != null
         && !listBasedOnFilters.getFilters().isEmpty();
-    Set<String> cfsInListBasedOnFields = new HashSet<>(0);
-    FilterList listBasedOnFields =
-        constructFilterListBasedOnFields(cfsInListBasedOnFields);
+    FilterList listBasedOnFields = constructFilterListBasedOnFields();
     boolean hasListBasedOnFields = listBasedOnFields != null
         && !listBasedOnFields.getFilters().isEmpty();
     // If filter lists based on both filters and fields can be created,
@@ -172,21 +164,6 @@ public abstract class TimelineEntityReader extends
     if (hasListBasedOnFilters && hasListBasedOnFields) {
       FilterList list = new FilterList();
       list.addFilter(listBasedOnFilters);
-
-      Set<String> cfsInListBasedOnFilters = new HashSet<>(0);
-      extractColumnFamiliesFromFiltersBasedOnFilters(
-          listBasedOnFilters, cfsInListBasedOnFilters);
-
-      // must exclude cfs that are already covered in fields-based filters
-      // otherwise we will return the whole cf
-      cfsInListBasedOnFilters.removeAll(cfsInListBasedOnFields);
-
-      if (!cfsInListBasedOnFilters.isEmpty()) {
-        for (String cf: cfsInListBasedOnFilters) {
-          listBasedOnFields.addFilter(new FamilyFilter(CompareOp.EQUAL,
-              new BinaryComparator(Bytes.toBytes(cf))));
-        }
-      }
       list.addFilter(listBasedOnFields);
       return list;
     } else if (hasListBasedOnFilters) {
@@ -197,21 +174,6 @@ public abstract class TimelineEntityReader extends
     return null;
   }
 
-  private static void extractColumnFamiliesFromFiltersBasedOnFilters(
-      Filter hbaseFilterBasedOnTLSFilter, Set<String> columnFamilies) {
-    if (hbaseFilterBasedOnTLSFilter instanceof SingleColumnValueFilter) {
-      byte[] cf = ((SingleColumnValueFilter)
-          hbaseFilterBasedOnTLSFilter).getFamily();
-      columnFamilies.add(Bytes.toString(cf));
-    } else if (hbaseFilterBasedOnTLSFilter instanceof FilterList) {
-      FilterList filterListBase = (FilterList) hbaseFilterBasedOnTLSFilter;
-      for (Filter fs: filterListBase.getFilters()) {
-        extractColumnFamiliesFromFiltersBasedOnFilters(fs, columnFamilies);
-      }
-    }
-  }
-
-
   protected TimelineDataToRetrieve getDataToRetrieve() {
     return dataToRetrieve;
   }
@@ -244,7 +206,7 @@ public abstract class TimelineEntityReader extends
     validateParams();
     augmentParams(hbaseConf, conn);
 
-    FilterList filterList = constructFilterListBasedOnFields(new HashSet<>(0));
+    FilterList filterList = constructFilterListBasedOnFields();
     if (LOG.isDebugEnabled() && filterList != null) {
       LOG.debug("FilterList created for get is - " + filterList);
     }
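Net effect of the patch: constructFilterListBasedOnFields() no longer threads a Set<String> of column-family names through the readers, and TimelineEntityReader#constructFilterList stops re-adding FamilyFilters for families referenced by the value-based filters. What remains is a plain AND of the two sub-lists. The following is a minimal standalone sketch of that remaining combination step, not the patched class itself; the class name FilterCombinationSketch is introduced here purely for illustration, while the variable and method names mirror the diff above.

import org.apache.hadoop.hbase.filter.FilterList;

/**
 * Sketch of the combination logic left in
 * TimelineEntityReader#constructFilterList after this change: the
 * filters-based and fields-based lists are AND-ed together (FilterList
 * defaults to Operator.MUST_PASS_ALL), with no per-column-family
 * FamilyFilters re-added on the fields side.
 */
final class FilterCombinationSketch {

  private FilterCombinationSketch() {
  }

  static FilterList combine(FilterList listBasedOnFilters,
      FilterList listBasedOnFields) {
    boolean hasListBasedOnFilters = listBasedOnFilters != null
        && !listBasedOnFilters.getFilters().isEmpty();
    boolean hasListBasedOnFields = listBasedOnFields != null
        && !listBasedOnFields.getFilters().isEmpty();
    if (hasListBasedOnFilters && hasListBasedOnFields) {
      // Both lists apply: a row must pass the filters-based list AND the
      // fields-based list.
      FilterList list = new FilterList();
      list.addFilter(listBasedOnFilters);
      list.addFilter(listBasedOnFields);
      return list;
    } else if (hasListBasedOnFilters) {
      return listBasedOnFilters;
    } else if (hasListBasedOnFields) {
      return listBasedOnFields;
    }
    // Neither list applies: scan without a filter.
    return null;
  }
}

The removed bookkeeping had widened the fields-based list with one FamilyFilter per column family named in the filters-based list (minus families it already covered); after this patch the fields-based list alone decides which families and qualifiers are fetched.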