Revert "YARN-7581. HBase filters are not constructed correctly in ATSv2. Contributed by Habio Chen."
This reverts commit a01091e90c.

parent 8bbd560805
commit a253925862
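In substance, YARN-7581 had threaded a `Set<String> cfsInFields` through `constructFilterListBasedOnFields` in six reader classes (file names below are inferred from the class declarations in the hunk headers) so that `TimelineEntityReader` could tell which column families the fields-based filters already covered and add a `FamilyFilter` only for the rest. This revert restores the parameterless signatures and drops that bookkeeping. A minimal sketch of the signature change follows; the skeleton classes `Before` and `After` are illustrative only, the real home of the abstract hook is `TimelineEntityReader`:

import java.io.IOException;
import java.util.Set;

import org.apache.hadoop.hbase.filter.FilterList;

// Sketch only: skeletal classes contrasting the two shapes of the hook that
// every entity reader implements. "Before" is the YARN-7581 shape being
// reverted; "After" is the shape this commit restores.
abstract class Before {
  // Implementations recorded each column family they covered into
  // cfsInFields while building the fields-based FilterList.
  protected abstract FilterList constructFilterListBasedOnFields(
      Set<String> cfsInFields) throws IOException;
}

abstract class After {
  // No column-family tracking; callers such as readEntity() invoke it
  // with no arguments (see the final TimelineEntityReader hunk below).
  protected abstract FilterList constructFilterListBasedOnFields()
      throws IOException;
}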
ApplicationEntityReader.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
@@ -259,8 +258,7 @@ class ApplicationEntityReader extends GenericEntityReader {
    * @throws IOException if any problem occurs while updating filter list.
    */
   private void updateFilterForConfsAndMetricsToRetrieve(
-      FilterList listBasedOnFields, Set<String> cfsInFields)
-      throws IOException {
+      FilterList listBasedOnFields) throws IOException {
     TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
     // Please note that if confsToRetrieve is specified, we would have added
     // CONFS to fields to retrieve in augmentParams() even if not specified.
@@ -270,8 +268,6 @@ class ApplicationEntityReader extends GenericEntityReader {
           createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getConfsToRetrieve(),
               ApplicationColumnFamily.CONFIGS, ApplicationColumnPrefix.CONFIG));
-      cfsInFields.add(
-          Bytes.toString(ApplicationColumnFamily.CONFIGS.getBytes()));
     }
 
     // Please note that if metricsToRetrieve is specified, we would have added
@@ -282,14 +278,11 @@ class ApplicationEntityReader extends GenericEntityReader {
           createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getMetricsToRetrieve(),
               ApplicationColumnFamily.METRICS, ApplicationColumnPrefix.METRIC));
-      cfsInFields.add(
-          Bytes.toString(ApplicationColumnFamily.METRICS.getBytes()));
     }
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
-      throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     if (!needCreateFilterListBasedOnFields()) {
       // Fetch all the columns. No need of a filter.
       return null;
@@ -310,9 +303,8 @@ class ApplicationEntityReader extends GenericEntityReader {
       excludeFieldsFromInfoColFamily(infoColFamilyList);
     }
     listBasedOnFields.addFilter(infoColFamilyList);
-    cfsInFields.add(Bytes.toString(ApplicationColumnFamily.INFO.getBytes()));
 
-    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields, cfsInFields);
+    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
     return listBasedOnFields;
   }
FlowActivityEntityReader.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
 
 import java.io.IOException;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
@@ -98,8 +97,7 @@ class FlowActivityEntityReader extends TimelineEntityReader {
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(
-      Set<String> cfsInFields) {
+  protected FilterList constructFilterListBasedOnFields() {
     return null;
   }
 
FlowRunEntityReader.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
 
 import java.io.IOException;
 import java.util.EnumSet;
-import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
@@ -35,7 +34,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
@@ -154,8 +152,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(
-      Set<String> cfsInFields) throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     FilterList list = new FilterList(Operator.MUST_PASS_ONE);
     // By default fetch everything in INFO column family.
     FamilyFilter infoColumnFamily =
@@ -169,7 +166,6 @@ class FlowRunEntityReader extends TimelineEntityReader {
         && !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
       FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
       infoColFamilyList.addFilter(infoColumnFamily);
-      cfsInFields.add(Bytes.toString(FlowRunColumnFamily.INFO.getBytes()));
       infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(FlowRunColumnPrefix.METRIC
               .getColumnPrefixBytes(""))));
@@ -186,7 +182,6 @@ class FlowRunEntityReader extends TimelineEntityReader {
         && !metricsToRetrieve.getFilterList().isEmpty()) {
       FilterList infoColFamilyList = new FilterList();
       infoColFamilyList.addFilter(infoColumnFamily);
-      cfsInFields.add(Bytes.toString(FlowRunColumnFamily.INFO.getBytes()));
       FilterList columnsList = updateFixedColumns();
       columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(
           FlowRunColumnPrefix.METRIC, metricsToRetrieve));
GenericEntityReader.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
@@ -350,8 +349,7 @@ class GenericEntityReader extends TimelineEntityReader {
    * @throws IOException if any problem occurs while updating filter list.
    */
   private void updateFilterForConfsAndMetricsToRetrieve(
-      FilterList listBasedOnFields, Set<String> cfsInFields)
-      throws IOException {
+      FilterList listBasedOnFields) throws IOException {
     TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
     // Please note that if confsToRetrieve is specified, we would have added
     // CONFS to fields to retrieve in augmentParams() even if not specified.
@@ -361,7 +359,6 @@ class GenericEntityReader extends TimelineEntityReader {
           .createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getConfsToRetrieve(), EntityColumnFamily.CONFIGS,
               EntityColumnPrefix.CONFIG));
-      cfsInFields.add(Bytes.toString(EntityColumnFamily.CONFIGS.getBytes()));
     }
 
     // Please note that if metricsToRetrieve is specified, we would have added
@@ -372,13 +369,11 @@ class GenericEntityReader extends TimelineEntityReader {
           .createFilterForConfsOrMetricsToRetrieve(
               dataToRetrieve.getMetricsToRetrieve(),
               EntityColumnFamily.METRICS, EntityColumnPrefix.METRIC));
-      cfsInFields.add(Bytes.toString(EntityColumnFamily.METRICS.getBytes()));
     }
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
-      throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     if (!needCreateFilterListBasedOnFields()) {
       // Fetch all the columns. No need of a filter.
       return null;
@@ -399,8 +394,7 @@ class GenericEntityReader extends TimelineEntityReader {
       excludeFieldsFromInfoColFamily(infoColFamilyList);
     }
     listBasedOnFields.addFilter(infoColFamilyList);
-    cfsInFields.add(Bytes.toString(EntityColumnFamily.INFO.getBytes()));
-    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields, cfsInFields);
+    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
     return listBasedOnFields;
   }
 
SubApplicationEntityReader.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
@@ -248,8 +247,7 @@ class SubApplicationEntityReader extends GenericEntityReader {
    * @throws IOException if any problem occurs while updating filter list.
    */
   private void updateFilterForConfsAndMetricsToRetrieve(
-      FilterList listBasedOnFields, Set<String> cfsInFields)
-      throws IOException {
+      FilterList listBasedOnFields) throws IOException {
     TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
     // Please note that if confsToRetrieve is specified, we would have added
     // CONFS to fields to retrieve in augmentParams() even if not specified.
@@ -260,8 +258,6 @@ class SubApplicationEntityReader extends GenericEntityReader {
               dataToRetrieve.getConfsToRetrieve(),
               SubApplicationColumnFamily.CONFIGS,
               SubApplicationColumnPrefix.CONFIG));
-      cfsInFields.add(
-          Bytes.toString(SubApplicationColumnFamily.CONFIGS.getBytes()));
     }
 
     // Please note that if metricsToRetrieve is specified, we would have added
@@ -273,14 +269,11 @@ class SubApplicationEntityReader extends GenericEntityReader {
               dataToRetrieve.getMetricsToRetrieve(),
               SubApplicationColumnFamily.METRICS,
               SubApplicationColumnPrefix.METRIC));
-      cfsInFields.add(
-          Bytes.toString(SubApplicationColumnFamily.METRICS.getBytes()));
     }
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
-      throws IOException {
+  protected FilterList constructFilterListBasedOnFields() throws IOException {
     if (!needCreateFilterListBasedOnFields()) {
       // Fetch all the columns. No need of a filter.
       return null;
@@ -300,9 +293,7 @@ class SubApplicationEntityReader extends GenericEntityReader {
       excludeFieldsFromInfoColFamily(infoColFamilyList);
     }
     listBasedOnFields.addFilter(infoColFamilyList);
-    cfsInFields.add(
-        Bytes.toString(SubApplicationColumnFamily.INFO.getBytes()));
-    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields, cfsInFields);
+    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
     return listBasedOnFields;
   }
 
TimelineEntityReader.java
@@ -30,16 +30,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.FamilyFilter;
-import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
@@ -127,12 +122,11 @@ public abstract class TimelineEntityReader extends
    * results fetched from HBase back-end storage. This is called only for
    * multiple entity reads.
    *
-   * @param cfsInFields column families in the fields
    * @return a {@link FilterList} object.
    * @throws IOException if any problem occurs while creating filter list.
    */
-  protected abstract FilterList constructFilterListBasedOnFields(
-      Set<String> cfsInFields) throws IOException;
+  protected abstract FilterList constructFilterListBasedOnFields()
+      throws IOException;
 
   /**
    * Creates a {@link FilterList} based on info, config and metric filters. This
@@ -157,9 +151,7 @@ public abstract class TimelineEntityReader extends
     FilterList listBasedOnFilters = constructFilterListBasedOnFilters();
     boolean hasListBasedOnFilters = listBasedOnFilters != null &&
         !listBasedOnFilters.getFilters().isEmpty();
-    Set<String> cfsInListBasedOnFields = new HashSet<>(0);
-    FilterList listBasedOnFields =
-        constructFilterListBasedOnFields(cfsInListBasedOnFields);
+    FilterList listBasedOnFields = constructFilterListBasedOnFields();
     boolean hasListBasedOnFields = listBasedOnFields != null &&
         !listBasedOnFields.getFilters().isEmpty();
     // If filter lists based on both filters and fields can be created,
@@ -172,21 +164,6 @@ public abstract class TimelineEntityReader extends
     if (hasListBasedOnFilters && hasListBasedOnFields) {
       FilterList list = new FilterList();
       list.addFilter(listBasedOnFilters);
-
-      Set<String> cfsInListBasedOnFilters = new HashSet<>(0);
-      extractColumnFamiliesFromFiltersBasedOnFilters(
-          listBasedOnFilters, cfsInListBasedOnFilters);
-
-      // must exclude cfs that are already covered in fields-based filters
-      // otherwise we will return the whole cf
-      cfsInListBasedOnFilters.removeAll(cfsInListBasedOnFields);
-
-      if (!cfsInListBasedOnFilters.isEmpty()) {
-        for (String cf: cfsInListBasedOnFilters) {
-          listBasedOnFields.addFilter(new FamilyFilter(CompareOp.EQUAL,
-              new BinaryComparator(Bytes.toBytes(cf))));
-        }
-      }
       list.addFilter(listBasedOnFields);
       return list;
     } else if (hasListBasedOnFilters) {
@@ -197,21 +174,6 @@ public abstract class TimelineEntityReader extends
     return null;
   }
 
-  private static void extractColumnFamiliesFromFiltersBasedOnFilters(
-      Filter hbaseFilterBasedOnTLSFilter, Set<String> columnFamilies) {
-    if (hbaseFilterBasedOnTLSFilter instanceof SingleColumnValueFilter) {
-      byte[] cf = ((SingleColumnValueFilter)
-          hbaseFilterBasedOnTLSFilter).getFamily();
-      columnFamilies.add(Bytes.toString(cf));
-    } else if (hbaseFilterBasedOnTLSFilter instanceof FilterList) {
-      FilterList filterListBase = (FilterList) hbaseFilterBasedOnTLSFilter;
-      for (Filter fs: filterListBase.getFilters()) {
-        extractColumnFamiliesFromFiltersBasedOnFilters(fs, columnFamilies);
-      }
-    }
-  }
-
-
   protected TimelineDataToRetrieve getDataToRetrieve() {
     return dataToRetrieve;
   }
@@ -244,7 +206,7 @@ public abstract class TimelineEntityReader extends
     validateParams();
     augmentParams(hbaseConf, conn);
 
-    FilterList filterList = constructFilterListBasedOnFields(new HashSet<>(0));
+    FilterList filterList = constructFilterListBasedOnFields();
     if (LOG.isDebugEnabled() && filterList != null) {
       LOG.debug("FilterList created for get is - " + filterList);
     }