YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)

Sangjin Lee 2016-04-11 21:07:32 -07:00
parent a3cf40e532
commit c2efdc415a
41 changed files with 5080 additions and 842 deletions

View File

@@ -18,11 +18,14 @@
 package org.apache.hadoop.yarn.server.timelineservice.reader;
-import java.util.Map;
-import java.util.Set;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
 /**
  * Encapsulates information regarding the filters to apply while querying. These
@@ -36,36 +39,81 @@
  * <li><b>createdTimeEnd</b> - Matched entities should not be created after
  * this timestamp. If null or {@literal <=0}, defaults to
  * {@link Long#MAX_VALUE}.</li>
- * <li><b>relatesTo</b> - Matched entities should relate to given entities.
- * If null or empty, the relations are not matched.</li>
- * <li><b>isRelatedTo</b> - Matched entities should be related to given
- * entities. If null or empty, the relations are not matched.</li>
+ * <li><b>relatesTo</b> - Matched entities should or should not relate to given
+ * entities depending on what's specified in the filter. The entities in
+ * relatesTo are identified by entity type and id. This is represented as
+ * a {@link TimelineFilterList} object containing
+ * {@link TimelineKeyValuesFilter} objects, each of which contains a
+ * set of values for a key and the comparison operator (equals/not equals). The
+ * key which represents the entity type is a string and values are a set of
+ * entity identifiers (also strings). As it is a filter list, relatesTo can be
+ * evaluated with logical AND/OR and we can create a hierarchy of these
+ * {@link TimelineKeyValuesFilter} objects. If null or empty, the relations are
+ * not matched.</li>
+ * <li><b>isRelatedTo</b> - Matched entities should or should not be related
+ * to given entities depending on what's specified in the filter. The entities
+ * in isRelatedTo are identified by entity type and id. This is represented as
+ * a {@link TimelineFilterList} object containing
+ * {@link TimelineKeyValuesFilter} objects, each of which contains a
+ * set of values for a key and the comparison operator (equals/not equals). The
+ * key which represents the entity type is a string and values are a set of
+ * entity identifiers (also strings). As it is a filter list, isRelatedTo can be
+ * evaluated with logical AND/OR and we can create a hierarchy of these
+ * {@link TimelineKeyValuesFilter} objects. If null or empty, the relations are
+ * not matched.</li>
  * <li><b>infoFilters</b> - Matched entities should have exact matches to
- * the given info represented as key-value pairs. If null or empty, the
- * filter is not applied.</li>
+ * the given info and should be either equal or not equal to the given value
+ * depending on what's specified in the filter. This is represented as a
+ * {@link TimelineFilterList} object containing {@link TimelineKeyValueFilter}
+ * objects, each of which contains key-value pairs with a comparison operator
+ * (equals/not equals). The key which represents the info key is a string but
+ * the value can be any object. As it is a filter list, info filters can be
+ * evaluated with logical AND/OR and we can create a hierarchy of these
+ * key-value pairs. If null or empty, the filter is not applied.</li>
  * <li><b>configFilters</b> - Matched entities should have exact matches to
- * the given configs represented as key-value pairs. If null or empty, the
- * filter is not applied.</li>
+ * the given configurations and should be either equal or not equal to the
+ * given value depending on what's specified in the filter. This is represented
+ * as a {@link TimelineFilterList} object containing
+ * {@link TimelineKeyValueFilter} objects, each of which contains key-value
+ * pairs with a comparison operator (equals/not equals). Both the key (which
+ * represents the config name) and the value (the config value) are strings. As
+ * it is a filter list, config filters can be evaluated with logical AND/OR and
+ * we can create a hierarchy of these {@link TimelineKeyValueFilter} objects.
+ * If null or empty, the filter is not applied.</li>
  * <li><b>metricFilters</b> - Matched entities should contain the given
- * metrics. If null or empty, the filter is not applied.</li>
- * <li><b>eventFilters</b> - Matched entities should contain the given
- * events. If null or empty, the filter is not applied.</li>
+ * metrics and satisfy the specified relation with the value. This is
+ * represented as a {@link TimelineFilterList} object containing
+ * {@link TimelineCompareFilter} objects, each of which contains key-value
+ * pairs along with the specified relational/comparison operator represented by
+ * {@link TimelineCompareOp}. The key is a string and the value is integral
+ * (Short/Integer/Long). As it is a filter list, metric filters can be evaluated
+ * with logical AND/OR and we can create a hierarchy of these
+ * {@link TimelineCompareFilter} objects. If null or empty, the filter is not
+ * applied.</li>
+ * <li><b>eventFilters</b> - Matched entities should contain or not contain the
+ * given events. This is represented as a {@link TimelineFilterList} object
+ * containing {@link TimelineExistsFilter} objects, each of which contains a
+ * value which must or must not exist depending on the comparison operator
+ * specified in the filter. For event filters, the value represents an event
+ * id. As it is a filter list, event filters can be evaluated with logical
+ * AND/OR and we can create a hierarchy of these {@link TimelineExistsFilter}
+ * objects. If null or empty, the filter is not applied.</li>
  * </ul>
  */
 @Private
 @Unstable
 public class TimelineEntityFilters {
-  private Long limit;
-  private Long createdTimeBegin;
-  private Long createdTimeEnd;
-  private Map<String, Set<String>> relatesTo;
-  private Map<String, Set<String>> isRelatedTo;
-  private Map<String, Object> infoFilters;
-  private Map<String, String> configFilters;
-  private Set<String> metricFilters;
-  private Set<String> eventFilters;
-  private static final Long DEFAULT_BEGIN_TIME = 0L;
-  private static final Long DEFAULT_END_TIME = Long.MAX_VALUE;
+  private long limit;
+  private long createdTimeBegin;
+  private long createdTimeEnd;
+  private TimelineFilterList relatesTo;
+  private TimelineFilterList isRelatedTo;
+  private TimelineFilterList infoFilters;
+  private TimelineFilterList configFilters;
+  private TimelineFilterList metricFilters;
+  private TimelineFilterList eventFilters;
+  private static final long DEFAULT_BEGIN_TIME = 0L;
+  private static final long DEFAULT_END_TIME = Long.MAX_VALUE;
   /**
    * Default limit of number of entities to return for getEntities API.
@@ -78,23 +126,26 @@ public TimelineEntityFilters() {
   public TimelineEntityFilters(
       Long entityLimit, Long timeBegin, Long timeEnd,
-      Map<String, Set<String>> entityRelatesTo,
-      Map<String, Set<String>> entityIsRelatedTo,
-      Map<String, Object> entityInfoFilters,
-      Map<String, String> entityConfigFilters,
-      Set<String> entityMetricFilters,
-      Set<String> entityEventFilters) {
-    this.limit = entityLimit;
-    if (this.limit == null || this.limit < 0) {
+      TimelineFilterList entityRelatesTo,
+      TimelineFilterList entityIsRelatedTo,
+      TimelineFilterList entityInfoFilters,
+      TimelineFilterList entityConfigFilters,
+      TimelineFilterList entityMetricFilters,
+      TimelineFilterList entityEventFilters) {
+    if (entityLimit == null || entityLimit < 0) {
       this.limit = DEFAULT_LIMIT;
+    } else {
+      this.limit = entityLimit;
     }
-    this.createdTimeBegin = timeBegin;
-    if (this.createdTimeBegin == null || this.createdTimeBegin < 0) {
+    if (timeBegin == null || timeBegin < 0) {
       this.createdTimeBegin = DEFAULT_BEGIN_TIME;
+    } else {
+      this.createdTimeBegin = timeBegin;
     }
-    this.createdTimeEnd = timeEnd;
-    if (this.createdTimeEnd == null || this.createdTimeEnd < 0) {
+    if (timeEnd == null || timeEnd < 0) {
       this.createdTimeEnd = DEFAULT_END_TIME;
+    } else {
+      this.createdTimeEnd = timeEnd;
     }
     this.relatesTo = entityRelatesTo;
     this.isRelatedTo = entityIsRelatedTo;
@@ -104,84 +155,87 @@ public TimelineEntityFilters(
     this.eventFilters = entityEventFilters;
   }
-  public Long getLimit() {
+  public long getLimit() {
     return limit;
   }
   public void setLimit(Long entityLimit) {
-    this.limit = entityLimit;
-    if (this.limit == null || this.limit < 0) {
+    if (entityLimit == null || entityLimit < 0) {
       this.limit = DEFAULT_LIMIT;
+    } else {
+      this.limit = entityLimit;
     }
   }
-  public Long getCreatedTimeBegin() {
+  public long getCreatedTimeBegin() {
    return createdTimeBegin;
   }
   public void setCreatedTimeBegin(Long timeBegin) {
-    this.createdTimeBegin = timeBegin;
-    if (this.createdTimeBegin == null || this.createdTimeBegin < 0) {
+    if (timeBegin == null || timeBegin < 0) {
       this.createdTimeBegin = DEFAULT_BEGIN_TIME;
+    } else {
+      this.createdTimeBegin = timeBegin;
     }
   }
-  public Long getCreatedTimeEnd() {
+  public long getCreatedTimeEnd() {
     return createdTimeEnd;
   }
   public void setCreatedTimeEnd(Long timeEnd) {
-    this.createdTimeEnd = timeEnd;
-    if (this.createdTimeEnd == null || this.createdTimeEnd < 0) {
+    if (timeEnd == null || timeEnd < 0) {
       this.createdTimeEnd = DEFAULT_END_TIME;
+    } else {
+      this.createdTimeEnd = timeEnd;
     }
   }
-  public Map<String, Set<String>> getRelatesTo() {
+  public TimelineFilterList getRelatesTo() {
     return relatesTo;
   }
-  public void setRelatesTo(Map<String, Set<String>> relations) {
+  public void setRelatesTo(TimelineFilterList relations) {
     this.relatesTo = relations;
   }
-  public Map<String, Set<String>> getIsRelatedTo() {
+  public TimelineFilterList getIsRelatedTo() {
     return isRelatedTo;
   }
-  public void setIsRelatedTo(Map<String, Set<String>> relations) {
+  public void setIsRelatedTo(TimelineFilterList relations) {
     this.isRelatedTo = relations;
   }
-  public Map<String, Object> getInfoFilters() {
+  public TimelineFilterList getInfoFilters() {
     return infoFilters;
   }
-  public void setInfoFilters(Map<String, Object> filters) {
+  public void setInfoFilters(TimelineFilterList filters) {
     this.infoFilters = filters;
   }
-  public Map<String, String> getConfigFilters() {
+  public TimelineFilterList getConfigFilters() {
     return configFilters;
   }
-  public void setConfigFilters(Map<String, String> filters) {
+  public void setConfigFilters(TimelineFilterList filters) {
     this.configFilters = filters;
   }
-  public Set<String> getMetricFilters() {
+  public TimelineFilterList getMetricFilters() {
     return metricFilters;
   }
-  public void setMetricFilters(Set<String> filters) {
+  public void setMetricFilters(TimelineFilterList filters) {
     this.metricFilters = filters;
   }
-  public Set<String> getEventFilters() {
+  public TimelineFilterList getEventFilters() {
     return eventFilters;
   }
-  public void setEventFilters(Set<String> filters) {
+  public void setEventFilters(TimelineFilterList filters) {
     this.eventFilters = filters;
   }
 }
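A minimal usage sketch of the new contract (not part of the patch; it relies only on the constructors and enum values shown in this diff, and the entity type, ids and metric name are made up for illustration):

import java.util.Arrays;
import java.util.HashSet;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;

// Entities must relate to flow run "flow1" OR "flow2" (key = entity type,
// values = entity ids, as the class javadoc above describes)...
TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR,
    new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "YARN_FLOW_RUN",
        new HashSet<Object>(Arrays.asList("flow1", "flow2"))));
// ...and must have metric MAP_SLOT_MILLIS >= 100.
TimelineFilterList metricFilters = new TimelineFilterList(
    new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL,
        "MAP_SLOT_MILLIS", 100L));
// At most 50 entities, created at or after t=1425016501000, no other filters.
TimelineEntityFilters filters = new TimelineEntityFilters(50L, 1425016501000L,
    null, relatesTo, null, null, null, metricFilters, null);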

View File

@@ -20,15 +20,19 @@
 import java.io.IOException;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Map;
 import java.util.Set;
 import javax.servlet.http.HttpServletRequest;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 /**
@@ -87,7 +91,7 @@ static TimelineEntityFilters createTimelineEntityFilters(String limit,
         parseKeyStrValuesStr(isRelatedTo, COMMA_DELIMITER, COLON_DELIMITER),
         parseKeyStrValueObj(infofilters, COMMA_DELIMITER, COLON_DELIMITER),
         parseKeyStrValueStr(conffilters, COMMA_DELIMITER, COLON_DELIMITER),
-        parseValuesStr(metricfilters, COMMA_DELIMITER),
+        parseMetricFilters(metricfilters, COMMA_DELIMITER),
         parseValuesStr(eventfilters, COMMA_DELIMITER));
   }
@@ -114,22 +118,26 @@ static TimelineDataToRetrieve createTimelineDataToRetrieve(String confs,
    * @param delimiter string is delimited by this delimiter.
    * @return set of strings.
    */
-  static Set<String> parseValuesStr(String str, String delimiter) {
+  static TimelineFilterList parseValuesStr(String str, String delimiter) {
     if (str == null || str.isEmpty()) {
       return null;
     }
-    Set<String> strSet = new HashSet<String>();
+    TimelineFilterList filterList = new TimelineFilterList();
     String[] strs = str.split(delimiter);
     for (String aStr : strs) {
-      strSet.add(aStr.trim());
+      filterList.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
+          aStr.trim()));
     }
-    return strSet;
+    return filterList;
   }
-  @SuppressWarnings("unchecked")
-  private static <T> void parseKeyValues(Map<String, T> map, String str,
+  private static TimelineFilterList parseKeyValues(String str,
       String pairsDelim, String keyValuesDelim, boolean stringValue,
       boolean multipleValues) {
+    if (str == null) {
+      return null;
+    }
+    TimelineFilterList list = new TimelineFilterList();
     String[] pairs = str.split(pairsDelim);
     for (String pair : pairs) {
       if (pair == null || pair.trim().isEmpty()) {
@@ -143,23 +151,28 @@ private static <T> void parseKeyValues(Map<String, T> map, String str,
         try {
           Object value =
               GenericObjectMapper.OBJECT_READER.readValue(pairStrs[1].trim());
-          map.put(pairStrs[0].trim(), (T) value);
+          list.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
+              pairStrs[0].trim(), value));
         } catch (IOException e) {
-          map.put(pairStrs[0].trim(), (T) pairStrs[1].trim());
+          list.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
+              pairStrs[0].trim(), pairStrs[1].trim()));
         }
       } else {
         String key = pairStrs[0].trim();
         if (multipleValues) {
-          Set<String> values = new HashSet<String>();
+          Set<Object> values = new HashSet<Object>();
           for (int i = 1; i < pairStrs.length; i++) {
             values.add(pairStrs[i].trim());
           }
-          map.put(key, (T) values);
+          list.addFilter(new TimelineKeyValuesFilter(
+              TimelineCompareOp.EQUAL, key, values));
         } else {
-          map.put(key, (T) pairStrs[1].trim());
+          list.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
+              key, pairStrs[1].trim()));
         }
       }
     }
+    return list;
   }
   /**
@@ -175,14 +188,9 @@ private static <T> void parseKeyValues(Map<String, T> map, String str,
    * @param keyValuesDelim values for a key are delimited by this delimiter.
    * @return a map of key-values with each key having a set of values.
    */
-  static Map<String, Set<String>> parseKeyStrValuesStr(String str,
-      String pairsDelim, String keyValuesDelim) {
-    if (str == null) {
-      return null;
-    }
-    Map<String, Set<String>> map = new HashMap<String, Set<String>>();
-    parseKeyValues(map, str, pairsDelim, keyValuesDelim, true, true);
-    return map;
+  static TimelineFilterList parseKeyStrValuesStr(String str, String pairsDelim,
+      String keyValuesDelim) {
+    return parseKeyValues(str, pairsDelim, keyValuesDelim, true, true);
   }
   /**
@@ -195,14 +203,9 @@ static Map<String, Set<String>> parseKeyStrValuesStr(String str,
    * @param keyValDelim key and value are delimited by this delimiter.
    * @return a map of key-value pairs with both key and value being strings.
    */
-  static Map<String, String> parseKeyStrValueStr(String str,
-      String pairsDelim, String keyValDelim) {
-    if (str == null) {
-      return null;
-    }
-    Map<String, String> map = new HashMap<String, String>();
-    parseKeyValues(map, str, pairsDelim, keyValDelim, true, false);
-    return map;
+  static TimelineFilterList parseKeyStrValueStr(String str, String pairsDelim,
+      String keyValDelim) {
+    return parseKeyValues(str, pairsDelim, keyValDelim, true, false);
   }
   /**
@@ -216,14 +219,9 @@ static Map<String, String> parseKeyStrValueStr(String str,
    * @return a map of key-value pairs with key being a string and value, any
    *         object.
    */
-  static Map<String, Object> parseKeyStrValueObj(String str,
-      String pairsDelim, String keyValDelim) {
-    if (str == null) {
-      return null;
-    }
-    Map<String, Object> map = new HashMap<String, Object>();
-    parseKeyValues(map, str, pairsDelim, keyValDelim, false, false);
-    return map;
+  static TimelineFilterList parseKeyStrValueObj(String str, String pairsDelim,
+      String keyValDelim) {
+    return parseKeyValues(str, pairsDelim, keyValDelim, false, false);
   }
   /**
@@ -247,6 +245,20 @@ static EnumSet<Field> parseFieldsStr(String str, String delimiter) {
     return fieldList;
   }
+  static TimelineFilterList parseMetricFilters(String str,
+      String delimiter) {
+    if (str == null || str.isEmpty()) {
+      return null;
+    }
+    TimelineFilterList list = new TimelineFilterList();
+    String[] strs = str.split(delimiter);
+    for (String aStr : strs) {
+      list.addFilter(new TimelineCompareFilter(
+          TimelineCompareOp.GREATER_OR_EQUAL, aStr.trim(), 0L));
+    }
+    return list;
+  }
   /**
    * Interpret passed string as a long.
    * @param str Passed string.
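Concretely, the REST query strings map to filter lists as follows (an illustrative sketch; the names and values are made up, and the pair/value delimiters are the COMMA_DELIMITER and COLON_DELIMITER constants referenced above):

// "conffilters=config1:123,config2:abc" -> an AND list of equality filters.
TimelineFilterList confFilters = new TimelineFilterList(
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config1", "123"),
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config2", "abc"));
// "eventfilters=event1,event2" -> an existence check per event id.
TimelineFilterList eventFilters = new TimelineFilterList(
    new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event1"),
    new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event2"));
// "metricfilters=metric1" -> "present and >= 0", because parseMetricFilters
// hard-codes GREATER_OR_EQUAL against 0L for now.
TimelineFilterList metricFilters = new TimelineFilterList(
    new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL,
        "metric1", 0L));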

View File

@@ -29,17 +29,27 @@
 @Unstable
 public class TimelineCompareFilter extends TimelineFilter {
-  private TimelineCompareOp compareOp;
-  private String key;
-  private Object value;
+  private final TimelineCompareOp compareOp;
+  private final String key;
+  private final Object value;
+  // If comparison operator is NOT_EQUAL, this flag decides if we should return
+  // the entity if key does not exist.
+  private final boolean keyMustExist;
-  public TimelineCompareFilter() {
-  }
-  public TimelineCompareFilter(TimelineCompareOp op, String key, Object val) {
+  public TimelineCompareFilter(TimelineCompareOp op, String key, Object val,
+      boolean keyMustExistFlag) {
     this.compareOp = op;
     this.key = key;
     this.value = val;
+    if (op == TimelineCompareOp.NOT_EQUAL) {
+      this.keyMustExist = keyMustExistFlag;
+    } else {
+      this.keyMustExist = true;
+    }
+  }
+  public TimelineCompareFilter(TimelineCompareOp op, String key, Object val) {
+    this(op, key, val, true);
   }
@@ -58,4 +68,15 @@ public String getKey() {
   public Object getValue() {
     return value;
   }
+  public boolean getKeyMustExist() {
+    return keyMustExist;
+  }
+  @Override
+  public String toString() {
+    return String.format("%s (%s, %s:%s:%b)",
+        this.getClass().getSimpleName(), this.compareOp.name(),
+        this.key, this.value, this.keyMustExist);
+  }
 }
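The keyMustExist flag only has an effect for NOT_EQUAL; the constructor above forces it to true for every other operator. A sketch of the distinction (the metric name and value are illustrative):

// An entity lacking metric "m1" is rejected: the key must exist.
TimelineCompareFilter strict = new TimelineCompareFilter(
    TimelineCompareOp.NOT_EQUAL, "m1", 100L, true);
// An entity lacking metric "m1" passes: a missing key counts as "not equal".
TimelineCompareFilter lenient = new TimelineCompareFilter(
    TimelineCompareOp.NOT_EQUAL, "m1", 100L, false);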

View File

@ -0,0 +1,62 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
* Filter class which represents filter to be applied based on existence of a
* value.
*/
@Private
@Unstable
public class TimelineExistsFilter extends TimelineFilter {

  private final TimelineCompareOp compareOp;
  private final String value;

  public TimelineExistsFilter(TimelineCompareOp op, String value) {
    this.value = value;
    if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) {
      throw new IllegalArgumentException("CompareOp for exists filter should " +
          "be EQUAL or NOT_EQUAL");
    }
    this.compareOp = op;
  }

  @Override
  public TimelineFilterType getFilterType() {
    return TimelineFilterType.EXISTS;
  }

  public String getValue() {
    return value;
  }

  public TimelineCompareOp getCompareOp() {
    return compareOp;
  }

  @Override
  public String toString() {
    return String.format("%s (%s %s)",
        this.getClass().getSimpleName(), this.compareOp.name(), this.value);
  }
}
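For example, an event filter list built from this class could look as follows (a sketch; the event ids are illustrative):

// Keep entities that saw a "start" event but no "finish" event yet.
TimelineFilterList eventFilters = new TimelineFilterList(Operator.AND,
    new TimelineExistsFilter(TimelineCompareOp.EQUAL, "start"),
    new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "finish"));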

View File

@@ -39,13 +39,25 @@ public enum TimelineFilterType {
      */
     LIST,
     /**
-     * Filter which is used for comparison.
+     * Filter which is used for key-value comparison.
      */
     COMPARE,
+    /**
+     * Filter which is used for checking key-value equality.
+     */
+    KEY_VALUE,
+    /**
+     * Filter which is used for checking key-multiple values equality.
+     */
+    KEY_VALUES,
     /**
      * Filter which matches prefix for a config or a metric.
      */
-    PREFIX
+    PREFIX,
+    /**
+     * Filter which checks existence of a value.
+     */
+    EXISTS
   }
   public abstract TimelineFilterType getFilterType();

View File

@@ -53,6 +53,14 @@ public TimelineFilterList(TimelineFilter...filters) {
     this(Operator.AND, filters);
   }
+  public TimelineFilterList() {
+    this(Operator.AND);
+  }
+  public TimelineFilterList(Operator op) {
+    this.operator = op;
+    this.filterList = new ArrayList<TimelineFilter>();
+  }
   public TimelineFilterList(Operator op, TimelineFilter...filters) {
     this.operator = op;
     this.filterList = new ArrayList<TimelineFilter>(Arrays.asList(filters));
@@ -88,4 +96,10 @@ public void setOperator(Operator op) {
   public void addFilter(TimelineFilter filter) {
     filterList.add(filter);
   }
+  @Override
+  public String toString() {
+    return String.format("TimelineFilterList %s (%d): %s",
+        this.operator, this.filterList.size(), this.filterList.toString());
+  }
 }
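With the no-arg and operator-only constructors, hierarchies can now be composed incrementally, e.g. (a sketch; keys and values are illustrative):

// (cfg1 == v1 AND cfg2 != v2) OR cfg3 == v3
TimelineFilterList inner = new TimelineFilterList(Operator.AND,
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg1", "v1"),
    new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "cfg2", "v2"));
TimelineFilterList outer = new TimelineFilterList(Operator.OR);
outer.addFilter(inner);
outer.addFilter(
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg3", "v3"));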

View File

@@ -18,25 +18,40 @@
 package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
+import org.apache.hadoop.hbase.filter.FamilyFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 /**
  * Set of utility methods used by timeline filter classes.
  */
 public final class TimelineFilterUtils {
+  private static final Log LOG = LogFactory.getLog(TimelineFilterUtils.class);
   private TimelineFilterUtils() {
   }
   /**
    * Returns the equivalent HBase filter list's {@link Operator}.
-   * @param op
+   *
+   * @param op timeline filter list operator.
    * @return HBase filter list's Operator.
    */
   private static Operator getHBaseOperator(TimelineFilterList.Operator op) {
@@ -52,7 +67,8 @@ private static Operator getHBaseOperator(TimelineFilterList.Operator op) {
   /**
    * Returns the equivalent HBase compare filter's {@link CompareOp}.
-   * @param op
+   *
+   * @param op timeline compare op.
    * @return HBase compare filter's CompareOp.
    */
   private static CompareOp getHBaseCompareOp(
@@ -89,6 +105,159 @@ private static <T> Filter createHBaseColQualPrefixFilter(
         colPrefix.getColumnPrefixBytes(filter.getPrefix())));
   }
+  /**
+   * Create a HBase {@link QualifierFilter} for the passed column prefix and
+   * compare op.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param compareOp compare op.
+   * @param columnPrefix column prefix.
+   * @return a column qualifier filter.
+   */
+  public static <T> Filter createHBaseQualifierFilter(CompareOp compareOp,
+      ColumnPrefix<T> columnPrefix) {
+    return new QualifierFilter(compareOp,
+        new BinaryPrefixComparator(
+            columnPrefix.getColumnPrefixBytes("")));
+  }
+  /**
+   * Create filters for confs or metrics to retrieve. This list includes a
+   * configs/metrics family filter and relevant filters for confs/metrics to
+   * retrieve, if present.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param confsOrMetricToRetrieve configs/metrics to retrieve.
+   * @param columnFamily config or metric column family.
+   * @param columnPrefix config or metric column prefix.
+   * @return a filter list.
+   * @throws IOException if any problem occurs while creating the filters.
+   */
+  public static <T> Filter createFilterForConfsOrMetricsToRetrieve(
+      TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily,
+      ColumnPrefix<T> columnPrefix) throws IOException {
+    Filter familyFilter = new FamilyFilter(CompareOp.EQUAL,
+        new BinaryComparator(columnFamily.getBytes()));
+    if (confsOrMetricToRetrieve != null &&
+        !confsOrMetricToRetrieve.getFilterList().isEmpty()) {
+      // If confsOrMetricsToRetrieve are specified, create a filter list based
+      // on it and the family filter.
+      FilterList filter = new FilterList(familyFilter);
+      filter.addFilter(
+          createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve));
+      return filter;
+    } else {
+      // Only the family filter needs to be added.
+      return familyFilter;
+    }
+  }
+  /**
+   * Create 2 HBase {@link SingleColumnValueFilter} filters for the specified
+   * value range represented by start and end value and wraps them inside a
+   * filter list. Start and end value should not be null.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param column Column for which single column value filter is to be created.
+   * @param startValue Start value.
+   * @param endValue End value.
+   * @return 2 single column value filters wrapped in a filter list.
+   * @throws IOException if any problem is encountered while encoding value.
+   */
+  public static <T> FilterList createSingleColValueFiltersByRange(
+      Column<T> column, Object startValue, Object endValue) throws IOException {
+    FilterList list = new FilterList();
+    Filter singleColValFilterStart = createHBaseSingleColValueFilter(
+        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
+        column.getValueConverter().encodeValue(startValue),
+        CompareOp.GREATER_OR_EQUAL, true);
+    list.addFilter(singleColValFilterStart);
+    Filter singleColValFilterEnd = createHBaseSingleColValueFilter(
+        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
+        column.getValueConverter().encodeValue(endValue),
+        CompareOp.LESS_OR_EQUAL, true);
+    list.addFilter(singleColValFilterEnd);
+    return list;
+  }
+  /**
+   * Creates a HBase {@link SingleColumnValueFilter}.
+   *
+   * @param columnFamily Column Family represented as bytes.
+   * @param columnQualifier Column Qualifier represented as bytes.
+   * @param value Value.
+   * @param compareOp Compare operator.
+   * @param filterIfMissing This flag decides if we should filter the row if the
+   *     specified column is missing. This is based on the filter's keyMustExist
+   *     field.
+   * @return a {@link SingleColumnValueFilter} object.
+   * @throws IOException if any problem occurs while creating the filter.
+   */
+  private static SingleColumnValueFilter createHBaseSingleColValueFilter(
+      byte[] columnFamily, byte[] columnQualifier, byte[] value,
+      CompareOp compareOp, boolean filterIfMissing) throws IOException {
+    SingleColumnValueFilter singleColValFilter =
+        new SingleColumnValueFilter(columnFamily, columnQualifier, compareOp,
+            new BinaryComparator(value));
+    singleColValFilter.setLatestVersionOnly(true);
+    singleColValFilter.setFilterIfMissing(filterIfMissing);
+    return singleColValFilter;
+  }
+  /**
+   * Create a filter list of qualifier filters based on passed set of columns.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param colPrefix Column Prefix.
+   * @param columns set of column qualifiers.
+   * @return filter list.
+   */
+  public static <T> FilterList createFiltersFromColumnQualifiers(
+      ColumnPrefix<T> colPrefix, Set<String> columns) {
+    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
+    for (String column : columns) {
+      // For columns which have compound column qualifiers (e.g. events), we
+      // need to include the required separator.
+      byte[] compoundColQual =
+          colPrefix.getCompoundColQualBytes(column, (byte[])null);
+      list.addFilter(new QualifierFilter(CompareOp.EQUAL,
+          new BinaryPrefixComparator(
+              colPrefix.getColumnPrefixBytes(compoundColQual))));
+    }
+    return list;
+  }
+  /**
+   * Fetch columns from filter list containing exists and multivalue equality
+   * filters. This is done to fetch only required columns from back-end and
+   * then match event filters or relationships in reader.
+   *
+   * @param filterList filter list.
+   * @return set of columns.
+   */
+  public static Set<String> fetchColumnsFromFilterList(
+      TimelineFilterList filterList) {
+    Set<String> strSet = new HashSet<String>();
+    for (TimelineFilter filter : filterList.getFilterList()) {
+      switch(filter.getFilterType()) {
+      case LIST:
+        strSet.addAll(fetchColumnsFromFilterList((TimelineFilterList)filter));
+        break;
+      case KEY_VALUES:
+        strSet.add(((TimelineKeyValuesFilter)filter).getKey());
+        break;
+      case EXISTS:
+        strSet.add(((TimelineExistsFilter)filter).getValue());
+        break;
+      default:
+        LOG.info("Unexpected filter type " + filter.getFilterType());
+        break;
+      }
+    }
+    return strSet;
+  }
   /**
    * Creates equivalent HBase {@link FilterList} from {@link TimelineFilterList}
    * while converting different timeline filters (of type {@link TimelineFilter})
@@ -98,22 +267,45 @@ private static <T> Filter createHBaseColQualPrefixFilter(
    * @param colPrefix column prefix which will be used for conversion.
    * @param filterList timeline filter list which has to be converted.
    * @return A {@link FilterList} object.
+   * @throws IOException if any problem occurs while creating the filter list.
    */
   public static <T> FilterList createHBaseFilterList(ColumnPrefix<T> colPrefix,
-      TimelineFilterList filterList) {
+      TimelineFilterList filterList) throws IOException {
     FilterList list =
         new FilterList(getHBaseOperator(filterList.getOperator()));
     for (TimelineFilter filter : filterList.getFilterList()) {
       switch(filter.getFilterType()) {
       case LIST:
-        list.addFilter(
-            createHBaseFilterList(colPrefix, (TimelineFilterList)filter));
+        list.addFilter(createHBaseFilterList(colPrefix,
+            (TimelineFilterList)filter));
         break;
       case PREFIX:
-        list.addFilter(createHBaseColQualPrefixFilter(
-            colPrefix, (TimelinePrefixFilter)filter));
+        list.addFilter(createHBaseColQualPrefixFilter(colPrefix,
+            (TimelinePrefixFilter)filter));
+        break;
+      case COMPARE:
+        TimelineCompareFilter compareFilter = (TimelineCompareFilter)filter;
+        list.addFilter(
+            createHBaseSingleColValueFilter(
+                colPrefix.getColumnFamilyBytes(),
+                colPrefix.getColumnPrefixBytes(compareFilter.getKey()),
+                colPrefix.getValueConverter().
+                    encodeValue(compareFilter.getValue()),
+                getHBaseCompareOp(compareFilter.getCompareOp()),
+                compareFilter.getKeyMustExist()));
+        break;
+      case KEY_VALUE:
+        TimelineKeyValueFilter kvFilter = (TimelineKeyValueFilter)filter;
+        list.addFilter(
+            createHBaseSingleColValueFilter(
+                colPrefix.getColumnFamilyBytes(),
+                colPrefix.getColumnPrefixBytes(kvFilter.getKey()),
+                colPrefix.getValueConverter().encodeValue(kvFilter.getValue()),
+                getHBaseCompareOp(kvFilter.getCompareOp()),
+                kvFilter.getKeyMustExist()));
         break;
       default:
+        LOG.info("Unexpected filter type " + filter.getFilterType());
         break;
       }
     }
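End to end, the HBase reader can now push a timeline filter list down into a Scan; a sketch (it assumes ApplicationColumnPrefix.CONFIG exists as the config column prefix of the application table, which this diff only shows indirectly):

Scan scan = new Scan();
TimelineFilterList confFilters = new TimelineFilterList(
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg1", "v1"));
// KEY_VALUE entries become SingleColumnValueFilters on config columns,
// honoring keyMustExist via setFilterIfMissing() as shown above.
scan.setFilter(TimelineFilterUtils.createHBaseFilterList(
    ApplicationColumnPrefix.CONFIG, confFilters));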

View File

@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
* Filter class which represents filter to be applied based on key-value pair
* being equal or not to the values in back-end store.
*/
@Private
@Unstable
public class TimelineKeyValueFilter extends TimelineCompareFilter {

  public TimelineKeyValueFilter(TimelineCompareOp op, String key, Object val,
      boolean keyMustExistFlag) {
    super(op, key, val, keyMustExistFlag);
    if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) {
      throw new IllegalArgumentException("TimelineCompareOp for equality"
          + " filter should be EQUAL or NOT_EQUAL");
    }
  }

  public TimelineKeyValueFilter(TimelineCompareOp op, String key, Object val) {
    this(op, key, val, true);
  }

  @Override
  public TimelineFilterType getFilterType() {
    return TimelineFilterType.KEY_VALUE;
  }
}

View File

@ -0,0 +1,71 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
* Filter class which represents filter to be applied based on multiple values
* for a key and these values being equal or not equal to values in back-end
* store.
*/
@Private
@Unstable
public class TimelineKeyValuesFilter extends TimelineFilter {

  private final TimelineCompareOp compareOp;
  private final String key;
  private final Set<Object> values;

  public TimelineKeyValuesFilter(TimelineCompareOp op, String key,
      Set<Object> values) {
    if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) {
      throw new IllegalArgumentException("TimelineCompareOp for multi value "
          + "equality filter should be EQUAL or NOT_EQUAL");
    }
    this.compareOp = op;
    this.key = key;
    this.values = values;
  }

  @Override
  public TimelineFilterType getFilterType() {
    return TimelineFilterType.KEY_VALUES;
  }

  public String getKey() {
    return key;
  }

  public Set<Object> getValues() {
    return values;
  }

  public TimelineCompareOp getCompareOp() {
    return compareOp;
  }

  @Override
  public String toString() {
    return String.format("%s (%s, %s:%s)",
        this.getClass().getSimpleName(), this.compareOp.name(),
        this.key, (values == null) ? "" : values.toString());
  }
}
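As used for relatesTo/isRelatedTo, the key is an entity type and the values are ids of entities of that type; a sketch (the container ids are illustrative):

Set<Object> ids = new HashSet<Object>(Arrays.asList(
    "container_e01_000001", "container_e01_000002"));
TimelineFilterList isRelatedTo = new TimelineFilterList(
    new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "YARN_CONTAINER",
        ids));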

View File

@@ -53,4 +53,10 @@ public String getPrefix() {
   public TimelineCompareOp getCompareOp() {
     return compareOp;
   }
+  @Override
+  public String toString() {
+    return String.format("%s (%s %s)",
+        this.getClass().getSimpleName(), this.compareOp.name(), this.prefix);
+  }
 }

View File

@@ -296,39 +296,39 @@ public int compare(Long l1, Long l2) {
         continue;
       }
       if (filters.getRelatesTo() != null &&
-          !filters.getRelatesTo().isEmpty() &&
-          !TimelineStorageUtils.matchRelations(
-              entity.getRelatesToEntities(), filters.getRelatesTo())) {
+          !filters.getRelatesTo().getFilterList().isEmpty() &&
+          !TimelineStorageUtils.matchRelatesTo(entity,
+              filters.getRelatesTo())) {
         continue;
       }
       if (filters.getIsRelatedTo() != null &&
-          !filters.getIsRelatedTo().isEmpty() &&
-          !TimelineStorageUtils.matchRelations(
-              entity.getIsRelatedToEntities(), filters.getIsRelatedTo())) {
+          !filters.getIsRelatedTo().getFilterList().isEmpty() &&
+          !TimelineStorageUtils.matchIsRelatedTo(entity,
+              filters.getIsRelatedTo())) {
         continue;
       }
       if (filters.getInfoFilters() != null &&
-          !filters.getInfoFilters().isEmpty() &&
-          !TimelineStorageUtils.matchFilters(
-              entity.getInfo(), filters.getInfoFilters())) {
+          !filters.getInfoFilters().getFilterList().isEmpty() &&
+          !TimelineStorageUtils.matchInfoFilters(entity,
+              filters.getInfoFilters())) {
         continue;
       }
       if (filters.getConfigFilters() != null &&
-          !filters.getConfigFilters().isEmpty() &&
-          !TimelineStorageUtils.matchFilters(
-              entity.getConfigs(), filters.getConfigFilters())) {
+          !filters.getConfigFilters().getFilterList().isEmpty() &&
+          !TimelineStorageUtils.matchConfigFilters(entity,
+              filters.getConfigFilters())) {
         continue;
       }
       if (filters.getMetricFilters() != null &&
-          !filters.getMetricFilters().isEmpty() &&
-          !TimelineStorageUtils.matchMetricFilters(
-              entity.getMetrics(), filters.getMetricFilters())) {
+          !filters.getMetricFilters().getFilterList().isEmpty() &&
+          !TimelineStorageUtils.matchMetricFilters(entity,
+              filters.getMetricFilters())) {
         continue;
       }
       if (filters.getEventFilters() != null &&
-          !filters.getEventFilters().isEmpty() &&
-          !TimelineStorageUtils.matchEventFilters(
-              entity.getEvents(), filters.getEventFilters())) {
+          !filters.getEventFilters().getFilterList().isEmpty() &&
+          !TimelineStorageUtils.matchEventFilters(entity,
+              filters.getEventFilters())) {
         continue;
       }
       TimelineEntity entityToBeReturned = createEntityToBeReturned(

View File

@@ -407,36 +407,39 @@ private void storeEvents(byte[] rowKey, Set<TimelineEvent> events,
             "! Using the current timestamp");
         eventTimestamp = System.currentTimeMillis();
       }
-      byte[] columnQualifierFirst =
-          Bytes.toBytes(Separator.VALUES.encode(eventId));
-      byte[] columnQualifierWithTsBytes = Separator.VALUES.
-          join(columnQualifierFirst, Bytes.toBytes(
-              TimelineStorageUtils.invertLong(eventTimestamp)));
+      byte[] eventTs =
+          Bytes.toBytes(TimelineStorageUtils.invertLong(eventTimestamp));
       Map<String, Object> eventInfo = event.getInfo();
       if ((eventInfo == null) || (eventInfo.size() == 0)) {
-        // add separator since event key is empty
-        byte[] compoundColumnQualifierBytes =
-            Separator.VALUES.join(columnQualifierWithTsBytes,
-                null);
         if (isApplication) {
+          byte[] compoundColumnQualifierBytes =
+              ApplicationColumnPrefix.EVENT.
+                  getCompoundColQualBytes(eventId, eventTs, null);
           ApplicationColumnPrefix.EVENT.store(rowKey, applicationTable,
               compoundColumnQualifierBytes, null,
               TimelineStorageUtils.EMPTY_BYTES);
         } else {
+          byte[] compoundColumnQualifierBytes =
+              EntityColumnPrefix.EVENT.
+                  getCompoundColQualBytes(eventId, eventTs, null);
           EntityColumnPrefix.EVENT.store(rowKey, entityTable,
               compoundColumnQualifierBytes, null,
               TimelineStorageUtils.EMPTY_BYTES);
         }
       } else {
         for (Map.Entry<String, Object> info : eventInfo.entrySet()) {
           // eventId?infoKey
-          byte[] compoundColumnQualifierBytes =
-              Separator.VALUES.join(columnQualifierWithTsBytes,
-                  Bytes.toBytes(info.getKey()));
+          byte[] infoKey = Bytes.toBytes(info.getKey());
           if (isApplication) {
+            byte[] compoundColumnQualifierBytes =
+                ApplicationColumnPrefix.EVENT.
+                    getCompoundColQualBytes(eventId, eventTs, infoKey);
             ApplicationColumnPrefix.EVENT.store(rowKey, applicationTable,
                 compoundColumnQualifierBytes, null, info.getValue());
           } else {
+            byte[] compoundColumnQualifierBytes =
+                EntityColumnPrefix.EVENT.
+                    getCompoundColQualBytes(eventId, eventTs, infoKey);
             EntityColumnPrefix.EVENT.store(rowKey, entityTable,
                 compoundColumnQualifierBytes, null, info.getValue());
           }
View File

@@ -24,8 +24,11 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
 /**
@@ -41,7 +44,8 @@ public enum ApplicationColumn implements Column<ApplicationTable> {
   /**
    * When the application was created.
    */
-  CREATED_TIME(ApplicationColumnFamily.INFO, "created_time"),
+  CREATED_TIME(ApplicationColumnFamily.INFO, "created_time",
+      LongConverter.getInstance()),
   /**
    * The version of the flow that this app belongs to.
@@ -55,12 +59,17 @@ public enum ApplicationColumn implements Column<ApplicationTable> {
   private ApplicationColumn(ColumnFamily<ApplicationTable> columnFamily,
       String columnQualifier) {
+    this(columnFamily, columnQualifier, GenericConverter.getInstance());
+  }
+  private ApplicationColumn(ColumnFamily<ApplicationTable> columnFamily,
+      String columnQualifier, ValueConverter converter) {
     this.columnFamily = columnFamily;
     this.columnQualifier = columnQualifier;
     // Future-proof by ensuring the right column prefix hygiene.
     this.columnQualifierBytes =
         Bytes.toBytes(Separator.SPACE.encode(columnQualifier));
-    this.column = new ColumnHelper<ApplicationTable>(columnFamily);
+    this.column = new ColumnHelper<ApplicationTable>(columnFamily, converter);
   }
   /**
@@ -81,6 +90,21 @@ public Object readResult(Result result) throws IOException {
     return column.readResult(result, columnQualifierBytes);
   }
+  @Override
+  public byte[] getColumnQualifierBytes() {
+    return columnQualifierBytes.clone();
+  }
+  @Override
+  public byte[] getColumnFamilyBytes() {
+    return columnFamily.getBytes();
+  }
+  @Override
+  public ValueConverter getValueConverter() {
+    return column.getValueConverter();
+  }
   /**
    * Retrieve an {@link ApplicationColumn} given a name, or null if there is no
    * match. The following holds true: {@code columnFor(x) == columnFor(y)} if
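Wiring LongConverter to CREATED_TIME stores the cell with a numeric encoding, which is what lets the new range helper in TimelineFilterUtils compare it as a number rather than as raw bytes; a sketch (the timestamps are illustrative):

// Rows whose created_time lies within [start, end], compared numerically.
FilterList createdTimeRange = TimelineFilterUtils
    .createSingleColValueFiltersByRange(ApplicationColumn.CREATED_TIME,
        1425016501000L, 1425016502000L);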

View File

@@ -56,7 +56,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> {
   /**
    * Lifecycle events for an application.
    */
-  EVENT(ApplicationColumnFamily.INFO, "e"),
+  EVENT(ApplicationColumnFamily.INFO, "e", true),
   /**
    * Config column stores configuration with config key as the column name.
@@ -78,6 +78,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> {
    */
   private final String columnPrefix;
   private final byte[] columnPrefixBytes;
+  private final boolean compoundColQual;
   /**
    * Private constructor, meant to be used by the enum definition.
@@ -87,7 +88,18 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> {
    */
   private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily,
       String columnPrefix) {
-    this(columnFamily, columnPrefix, GenericConverter.getInstance());
+    this(columnFamily, columnPrefix, false, GenericConverter.getInstance());
+  }
+  private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily,
+      String columnPrefix, boolean compoundColQual) {
+    this(columnFamily, columnPrefix, compoundColQual,
+        GenericConverter.getInstance());
+  }
+  private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily,
+      String columnPrefix, ValueConverter converter) {
+    this(columnFamily, columnPrefix, false, converter);
   }
   /**
@@ -99,7 +111,7 @@ private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily,
    * this column prefix.
    */
   private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily,
-      String columnPrefix, ValueConverter converter) {
+      String columnPrefix, boolean compoundColQual, ValueConverter converter) {
     column = new ColumnHelper<ApplicationTable>(columnFamily, converter);
     this.columnFamily = columnFamily;
     this.columnPrefix = columnPrefix;
@@ -110,6 +122,7 @@ private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily,
       this.columnPrefixBytes =
           Bytes.toBytes(Separator.SPACE.encode(columnPrefix));
     }
+    this.compoundColQual = compoundColQual;
   }
   /**
@@ -131,6 +144,20 @@ public byte[] getColumnPrefixBytes(String qualifierPrefix) {
         this.columnPrefixBytes, qualifierPrefix);
   }
+  @Override
+  public byte[] getColumnFamilyBytes() {
+    return columnFamily.getBytes();
+  }
+  @Override
+  public byte[] getCompoundColQualBytes(String qualifier,
+      byte[]...components) {
+    if (!compoundColQual) {
+      return ColumnHelper.getColumnQualifier(null, qualifier);
+    }
+    return ColumnHelper.getCompoundColumnQualifierBytes(qualifier, components);
+  }
   /*
    * (non-Javadoc)
    *
@@ -196,6 +223,10 @@ public Object readResult(Result result, String qualifier) throws IOException {
     return column.readResult(result, columnQualifier);
   }
+  public ValueConverter getValueConverter() {
+    return column.getValueConverter();
+  }
   /*
    * (non-Javadoc)
    *

View File

@@ -25,6 +25,7 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
 import java.io.IOException;
@@ -71,6 +72,11 @@ private String getColumnQualifier() {
     return columnQualifier;
   }
+  @Override
+  public byte[] getColumnQualifierBytes() {
+    return columnQualifierBytes.clone();
+  }
   public void store(byte[] rowKey,
       TypedBufferedMutator<AppToFlowTable> tableMutator, Long timestamp,
       Object inputValue, Attribute... attributes) throws IOException {
@@ -78,6 +84,16 @@ public void store(byte[] rowKey,
         inputValue, attributes);
   }
+  @Override
+  public byte[] getColumnFamilyBytes() {
+    return columnFamily.getBytes();
+  }
+  @Override
+  public ValueConverter getValueConverter() {
+    return column.getValueConverter();
+  }
   public Object readResult(Result result) throws IOException {
     return column.readResult(result, columnQualifierBytes);
   }

View File

@@ -60,4 +60,21 @@ void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
    */
   Object readResult(Result result) throws IOException;
+  /**
+   * Returns column family name (as bytes) associated with this column.
+   * @return a byte array encoding column family for this column qualifier.
+   */
+  byte[] getColumnFamilyBytes();
+  /**
+   * Get byte representation for this column qualifier.
+   * @return a byte array representing column qualifier.
+   */
+  byte[] getColumnQualifierBytes();
+  /**
+   * Returns value converter implementation associated with this column.
+   * @return a {@link ValueConverter} implementation.
+   */
+  ValueConverter getValueConverter();
 }

View File

@@ -362,6 +362,22 @@ public static byte[] getColumnQualifier(byte[] columnPrefixBytes,
     return columnQualifier;
   }
+  /**
+   * Create a compound column qualifier by combining qualifier and components.
+   *
+   * @param qualifier Column Qualifier.
+   * @param components Other components.
+   * @return a byte array representing compound column qualifier.
+   */
+  public static byte[] getCompoundColumnQualifierBytes(String qualifier,
+      byte[]...components) {
+    byte[] colQualBytes = Bytes.toBytes(Separator.VALUES.encode(qualifier));
+    for (int i = 0; i < components.length; i++) {
+      colQualBytes = Separator.VALUES.join(colQualBytes, components[i]);
+    }
+    return colQualBytes;
+  }
   /**
    * @param columnPrefixBytes The byte representation for the column prefix.
    * Should not contain {@link Separator#QUALIFIERS}.
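Together with the event writer changes above, an event column qualifier is now assembled from the event id, the inverted timestamp and the info key, in that order; a sketch ('?' stands in for Separator.VALUES, and the event id/info key are illustrative):

long eventTimestamp = 1425016501000L;
byte[] eventTs = Bytes.toBytes(TimelineStorageUtils.invertLong(eventTimestamp));
// Produces <eventId>?<inverted ts>?<infoKey>; the "e" column prefix is
// prepended later by the EVENT column prefix when storing.
byte[] qualifier = ColumnHelper.getCompoundColumnQualifierBytes(
    "YARN_APPLICATION_CREATED", eventTs, Bytes.toBytes("infoKey"));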

View File

@ -111,6 +111,18 @@ void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
  <V> NavigableMap<String, NavigableMap<Long, V>>
      readResultsWithTimestamps(Result result) throws IOException;
/**
* @param result from which to read columns
* @return the latest values of columns in the column family. The column
* qualifier is returned as a list of parts, each part a byte[]. This
* is to facilitate returning byte arrays of values that were not
* Strings. If they can be treated as Strings, you should use
* {@link #readResults(Result)} instead.
* @throws IOException if any problem occurs while reading results.
*/
Map<?, Object> readResultsHavingCompoundColumnQualifiers(Result result)
throws IOException;
  /**
   * @param qualifierPrefix Column qualifier or prefix of qualifier.
   * @return a byte array encoding column prefix and qualifier/prefix passed.
@ -122,4 +134,27 @@ void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
   * @return a byte array encoding column prefix and qualifier/prefix passed.
   */
  byte[] getColumnPrefixBytes(byte[] qualifierPrefix);
/**
* Returns column family name(as bytes) associated with this column prefix.
* @return a byte array encoding column family for this prefix.
*/
byte[] getColumnFamilyBytes();
/**
* Returns value converter implementation associated with this column prefix.
* @return a {@link ValueConverter} implementation.
*/
ValueConverter getValueConverter();
/**
* Get compound column qualifier bytes if the column qualifier is a compound
* qualifier. Returns the qualifier passed as bytes if the column is not a
* compound column qualifier.
*
* @param qualifier Column Qualifier.
* @param components Other components.
* @return byte array representing compound column qualifier.
*/
byte[] getCompoundColQualBytes(String qualifier, byte[]...components);
}

View File

@ -0,0 +1,71 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.storage.common;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter.TimelineFilterType;
/**
* Used to define which filter to match.
*/
enum TimelineEntityFiltersType {
CONFIG {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUE;
}
},
INFO {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUE;
}
},
METRIC {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.COMPARE;
}
},
EVENT {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.EXISTS;
}
},
IS_RELATED_TO {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUES;
}
},
RELATES_TO {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUES;
}
};
/**
* Checks whether filter type is valid for the filter being matched.
*
* @param filterType filter type.
 * @return true if it is a valid filter, false otherwise.
*/
abstract boolean isValidFilter(TimelineFilterType filterType);
}
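As a quick illustration of how this enum gates filter types, the standalone mirror below reproduces the same validity table with local stand-in types; the names here are hypothetical, not the shipped classes.

enum FilterNodeType { LIST, COMPARE, EXISTS, KEY_VALUE, KEY_VALUES }

enum FilterCategory {
  CONFIG, INFO, METRIC, EVENT, IS_RELATED_TO, RELATES_TO;

  // Same table as TimelineEntityFiltersType.isValidFilter above.
  boolean accepts(FilterNodeType t) {
    switch (this) {
    case CONFIG:
    case INFO:
      return t == FilterNodeType.LIST || t == FilterNodeType.KEY_VALUE;
    case METRIC:
      return t == FilterNodeType.LIST || t == FilterNodeType.COMPARE;
    case EVENT:
      return t == FilterNodeType.LIST || t == FilterNodeType.EXISTS;
    default: // IS_RELATED_TO and RELATES_TO
      return t == FilterNodeType.LIST || t == FilterNodeType.KEY_VALUES;
    }
  }
}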

View File

@ -17,21 +17,26 @@
package org.apache.hadoop.yarn.server.timelineservice.storage.common;

import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
@ -39,6 +44,15 @@
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter.TimelineFilterType;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
@ -53,6 +67,8 @@ public final class TimelineStorageUtils {
  private TimelineStorageUtils() {
  }

  private static final Log LOG =
      LogFactory.getLog(TimelineStorageUtils.class);

  /** empty bytes. */
  public static final byte[] EMPTY_BYTES = new byte[0];
@ -311,6 +327,21 @@ public static boolean isApplicationFinished(TimelineEntity te) {
    return false;
  }
/**
* Check if we have a certain field amongst fields to retrieve. This method
* checks against {@link Field#ALL} as well because that would mean field
* passed needs to be matched.
*
* @param fieldsToRetrieve fields to be retrieved.
* @param requiredField fields to be checked in fieldsToRetrieve.
* @return true if has the required field, false otherwise.
*/
public static boolean hasField(EnumSet<Field> fieldsToRetrieve,
Field requiredField) {
return fieldsToRetrieve.contains(Field.ALL) ||
fieldsToRetrieve.contains(requiredField);
}
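hasField treats Field.ALL as a wildcard, so callers never have to special-case it. A runnable sketch of just that semantics, with a local stand-in enum (the real Field lives in TimelineReader):

import java.util.EnumSet;

public class HasFieldDemo {
  enum Field { ALL, EVENTS, INFO, CONFIGS, METRICS, RELATES_TO, IS_RELATED_TO }

  // ALL matches every required field; otherwise the field must be listed.
  static boolean hasField(EnumSet<Field> fieldsToRetrieve, Field required) {
    return fieldsToRetrieve.contains(Field.ALL)
        || fieldsToRetrieve.contains(required);
  }

  public static void main(String[] args) {
    System.out.println(hasField(EnumSet.of(Field.ALL), Field.METRICS));  // true
    System.out.println(hasField(EnumSet.of(Field.INFO), Field.METRICS)); // false
  }
}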
  /**
   * Checks if the input TimelineEntity object is an ApplicationEntity.
   *
@ -385,87 +416,317 @@ public static Tag getTagFromAttribute(Entry<String, byte[]> attribute) {
  }
  /**
   *
   * @param entityRelations the relations of an entity
   * @param relationFilters the relations for filtering
   * @return a boolean flag to indicate if both match
   */
  public static boolean matchRelations(
      Map<String, Set<String>> entityRelations,
      Map<String, Set<String>> relationFilters) {
    for (Map.Entry<String, Set<String>> relation : relationFilters.entrySet()) {
      Set<String> ids = entityRelations.get(relation.getKey());
      if (ids == null) {
        return false;
      }
      for (String id : relation.getValue()) {
        if (!ids.contains(id)) {
          return false;
        }
      }
    }
    return true;
  }

  /**
   * Matches key-values filter. Used for relatesTo/isRelatedTo filters.
   *
   * @param entity entity which holds relatesTo/isRelatedTo relations which we
   *     will match against.
   * @param keyValuesFilter key-values filter.
   * @param entityFiltersType type of filters we are trying to match.
   * @return true, if filter matches, false otherwise.
   */
  private static boolean matchKeyValuesFilter(TimelineEntity entity,
      TimelineKeyValuesFilter keyValuesFilter,
      TimelineEntityFiltersType entityFiltersType) {
    Map<String, Set<String>> relations = null;
    if (entityFiltersType == TimelineEntityFiltersType.IS_RELATED_TO) {
      relations = entity.getIsRelatedToEntities();
    } else if (entityFiltersType == TimelineEntityFiltersType.RELATES_TO) {
      relations = entity.getRelatesToEntities();
    }
    if (relations == null) {
      return false;
    }
    Set<String> ids = relations.get(keyValuesFilter.getKey());
    if (ids == null) {
      return false;
    }
    boolean matched = false;
    for (Object id : keyValuesFilter.getValues()) {
      // Matches if id is found amongst the relationships for an entity and
      // filter's compare op is EQUAL.
      // If compare op is NOT_EQUAL, for a match to occur, id should not be
      // found amongst relationships for an entity.
      matched = !(ids.contains(id) ^
          keyValuesFilter.getCompareOp() == TimelineCompareOp.EQUAL);
      if (!matched) {
        return false;
      }
    }
    return true;
  }
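The !(found ^ (op == EQUAL)) idiom used above (and again in the key-value, exists, and event matchers below) collapses four cases into one expression. Spelled out as a standalone sketch with a hypothetical local operator enum:

public class XorMatchDemo {
  enum Op { EQUAL, NOT_EQUAL }

  // (found=true,  EQUAL)     -> true   (found=false, EQUAL)     -> false
  // (found=true,  NOT_EQUAL) -> false  (found=false, NOT_EQUAL) -> true
  static boolean matches(boolean found, Op op) {
    return !(found ^ (op == Op.EQUAL));
  }

  public static void main(String[] args) {
    System.out.println(matches(true, Op.EQUAL));      // true
    System.out.println(matches(false, Op.NOT_EQUAL)); // true
  }
}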
/**
* Matches relatesto.
*
* @param entity entity which holds relatesto relations.
* @param relatesTo the relations for filtering.
* @return true, if filter matches, false otherwise.
* @throws IOException if an unsupported filter for matching relations is
* being matched.
*/
public static boolean matchRelatesTo(TimelineEntity entity,
TimelineFilterList relatesTo) throws IOException {
return matchFilters(
entity, relatesTo, TimelineEntityFiltersType.RELATES_TO);
}
/**
* Matches isrelatedto.
*
* @param entity entity which holds isRelatedTo relations.
* @param isRelatedTo the relations for filtering.
* @return true, if filter matches, false otherwise.
* @throws IOException if an unsupported filter for matching relations is
* being matched.
*/
public static boolean matchIsRelatedTo(TimelineEntity entity,
TimelineFilterList isRelatedTo) throws IOException {
return matchFilters(
entity, isRelatedTo, TimelineEntityFiltersType.IS_RELATED_TO);
}
/**
* Matches key-value filter. Used for config and info filters.
*
* @param entity entity which holds the config/info which we will match
* against.
* @param kvFilter a key-value filter.
* @param entityFiltersType type of filters we are trying to match.
* @return true, if filter matches, false otherwise.
*/
private static boolean matchKeyValueFilter(TimelineEntity entity,
TimelineKeyValueFilter kvFilter,
TimelineEntityFiltersType entityFiltersType) {
Map<String, ? extends Object> map = null;
// Supported only for config and info filters.
if (entityFiltersType == TimelineEntityFiltersType.CONFIG) {
map = entity.getConfigs();
} else if (entityFiltersType == TimelineEntityFiltersType.INFO) {
map = entity.getInfo();
}
if (map == null) {
return false;
}
Object value = map.get(kvFilter.getKey());
if (value == null) {
return false;
}
// Matches if filter's value is equal to the value of the key and filter's
// compare op is EQUAL.
// If compare op is NOT_EQUAL, for a match to occur, value should not be
// equal to the value of the key.
return !(value.equals(kvFilter.getValue()) ^
kvFilter.getCompareOp() == TimelineCompareOp.EQUAL);
}
/**
* Matches config filters.
*
* @param entity entity which holds a map of config key-value pairs.
 * @param configFilters list of config filters.
* @return a boolean flag to indicate if both match.
* @throws IOException if an unsupported filter for matching config filters is
* being matched.
*/
public static boolean matchConfigFilters(TimelineEntity entity,
TimelineFilterList configFilters) throws IOException {
return
matchFilters(entity, configFilters, TimelineEntityFiltersType.CONFIG);
}
/**
* Matches info filters.
*
* @param entity entity which holds a map of info key-value pairs.
* @param infoFilters list of info filters.
* @return a boolean flag to indicate if both match.
* @throws IOException if an unsupported filter for matching info filters is
* being matched.
*/
public static boolean matchInfoFilters(TimelineEntity entity,
TimelineFilterList infoFilters) throws IOException {
return matchFilters(entity, infoFilters, TimelineEntityFiltersType.INFO);
}
/**
* Matches exists filter. Used for event filters.
*
* @param entity entity which holds the events which we will match against.
* @param existsFilter exists filter.
* @param entityFiltersType type of filters we are trying to match.
* @return true, if filter matches, false otherwise.
*/
private static boolean matchExistsFilter(TimelineEntity entity,
TimelineExistsFilter existsFilter,
TimelineEntityFiltersType entityFiltersType) {
// Currently exists filter is only supported for event filters.
if (entityFiltersType != TimelineEntityFiltersType.EVENT) {
return false;
}
Set<String> eventIds = new HashSet<String>();
for (TimelineEvent event : entity.getEvents()) {
eventIds.add(event.getId());
}
    // Matches if filter's value is contained in the list of events and
    // filter's compare op is EQUAL.
// If compare op is NOT_EQUAL, for a match to occur, value should not be
// contained in the list of events.
return !(eventIds.contains(existsFilter.getValue()) ^
existsFilter.getCompareOp() == TimelineCompareOp.EQUAL);
}
/**
* Matches event filters.
*
* @param entity entity which holds a set of event objects.
 * @param eventFilters the list of event filters.
* @return a boolean flag to indicate if both match.
* @throws IOException if an unsupported filter for matching event filters is
* being matched.
*/
public static boolean matchEventFilters(TimelineEntity entity,
TimelineFilterList eventFilters) throws IOException {
return matchFilters(entity, eventFilters, TimelineEntityFiltersType.EVENT);
}
/**
* Compare two values based on comparison operator.
*
* @param compareOp comparison operator.
* @param val1 value 1.
* @param val2 value 2.
* @return true, if relation matches, false otherwise
*/
private static boolean compareValues(TimelineCompareOp compareOp,
long val1, long val2) {
switch (compareOp) {
case LESS_THAN:
return val1 < val2;
case LESS_OR_EQUAL:
return val1 <= val2;
case EQUAL:
return val1 == val2;
case NOT_EQUAL:
return val1 != val2;
case GREATER_OR_EQUAL:
return val1 >= val2;
case GREATER_THAN:
return val1 > val2;
default:
throw new RuntimeException("Unknown TimelineCompareOp " +
compareOp.name());
}
}
/**
* Matches compare filter. Used for metric filters.
*
* @param entity entity which holds the metrics which we will match against.
* @param compareFilter compare filter.
* @param entityFiltersType type of filters we are trying to match.
* @return true, if filter matches, false otherwise.
* @throws IOException if metric filters holds non integral values.
*/
private static boolean matchCompareFilter(TimelineEntity entity,
TimelineCompareFilter compareFilter,
TimelineEntityFiltersType entityFiltersType) throws IOException {
    // Currently, compare filter is only supported for metric filters.
if (entityFiltersType != TimelineEntityFiltersType.METRIC) {
return false;
}
    // We expect only integral values (short/int/long) for metric filters.
if (!isIntegralValue(compareFilter.getValue())) {
throw new IOException("Metric filters has non integral values");
}
Map<String, TimelineMetric> metricMap =
new HashMap<String, TimelineMetric>();
for (TimelineMetric metric : entity.getMetrics()) {
metricMap.put(metric.getId(), metric);
}
TimelineMetric metric = metricMap.get(compareFilter.getKey());
if (metric == null) {
return false;
}
// We will be using the latest value of metric to compare.
return compareValues(compareFilter.getCompareOp(),
metric.getValuesJAXB().firstEntry().getValue().longValue(),
((Number)compareFilter.getValue()).longValue());
}
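Note that only the most recent metric value participates in the comparison; the firstEntry() call above relies on the metric's values map iterating timestamps newest-first. A standalone mirror of that assumption:

import java.util.Collections;
import java.util.TreeMap;

public class LatestMetricDemo {
  public static void main(String[] args) {
    // Timestamps sorted descending, mirroring how the reader picks "latest".
    TreeMap<Long, Number> values = new TreeMap<>(Collections.reverseOrder());
    values.put(100L, 40L);
    values.put(200L, 55L); // newest entry wins
    long latest = values.firstEntry().getValue().longValue();
    System.out.println(latest >= 50L); // GREATER_OR_EQUAL 50 -> true
  }
}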
/**
* Matches metric filters.
*
* @param entity entity which holds a set of metric objects.
* @param metricFilters list of metric filters.
* @return a boolean flag to indicate if both match.
* @throws IOException if an unsupported filter for matching metric filters is
* being matched.
*/
public static boolean matchMetricFilters(TimelineEntity entity,
TimelineFilterList metricFilters) throws IOException {
return matchFilters(
entity, metricFilters, TimelineEntityFiltersType.METRIC);
}
/**
* Common routine to match different filters. Iterates over a filter list and
* calls routines based on filter type.
*
* @param entity Timeline entity.
* @param filters filter list.
* @param entityFiltersType type of filters which are being matched.
* @return a boolean flag to indicate if filter matches.
* @throws IOException if an unsupported filter for matching this specific
* filter is being matched.
*/
private static boolean matchFilters(TimelineEntity entity,
TimelineFilterList filters, TimelineEntityFiltersType entityFiltersType)
throws IOException {
if (filters == null || filters.getFilterList().isEmpty()) {
return false;
}
TimelineFilterList.Operator operator = filters.getOperator();
for (TimelineFilter filter : filters.getFilterList()) {
TimelineFilterType filterType = filter.getFilterType();
if (!entityFiltersType.isValidFilter(filterType)) {
throw new IOException("Unsupported filter " + filterType);
}
boolean matched = false;
switch (filterType) {
case LIST:
matched = matchFilters(entity, (TimelineFilterList)filter,
entityFiltersType);
break;
case COMPARE:
matched = matchCompareFilter(entity, (TimelineCompareFilter)filter,
entityFiltersType);
break;
case EXISTS:
matched = matchExistsFilter(entity, (TimelineExistsFilter)filter,
entityFiltersType);
break;
case KEY_VALUE:
matched = matchKeyValueFilter(entity, (TimelineKeyValueFilter)filter,
entityFiltersType);
break;
case KEY_VALUES:
matched = matchKeyValuesFilter(entity, (TimelineKeyValuesFilter)filter,
entityFiltersType);
break;
default:
throw new IOException("Unsupported filter " + filterType);
}
if (!matched) {
        if (operator == TimelineFilterList.Operator.AND) {
          return false;
        }
      } else {
        if (operator == TimelineFilterList.Operator.OR) {
          return true;
        }
      }
    }
    return operator == TimelineFilterList.Operator.AND;
  }
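The loop above short-circuits: AND fails on the first non-match, OR succeeds on the first match, and falling out of the loop means every filter matched (true for AND) or none did (false for OR). A standalone mirror of just that control flow, not the shipped code:

import java.util.Arrays;
import java.util.List;

public class FilterListDemo {
  enum Operator { AND, OR }

  static boolean evaluate(Operator op, List<Boolean> matchResults) {
    for (boolean matched : matchResults) {
      if (!matched) {
        if (op == Operator.AND) {
          return false; // AND: first miss decides
        }
      } else if (op == Operator.OR) {
        return true;    // OR: first hit decides
      }
    }
    return op == Operator.AND; // exhausted list
  }

  public static void main(String[] args) {
    System.out.println(evaluate(Operator.AND, Arrays.asList(true, false))); // false
    System.out.println(evaluate(Operator.OR, Arrays.asList(false, true))); // true
  }
}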
/**
*
* @param map the map of key/value pairs in an entity
* @param filters the map of key/value pairs for filtering
* @return a boolean flag to indicate if both match
*/
public static boolean matchFilters(Map<String, ? extends Object> map,
Map<String, ? extends Object> filters) {
for (Map.Entry<String, ? extends Object> filter : filters.entrySet()) {
Object value = map.get(filter.getKey());
if (value == null) {
return false;
}
if (!value.equals(filter.getValue())) {
return false;
}
}
return true;
}
/**
*
* @param entityEvents the set of event objects in an entity
* @param eventFilters the set of event Ids for filtering
* @return a boolean flag to indicate if both match
*/
public static boolean matchEventFilters(Set<TimelineEvent> entityEvents,
Set<String> eventFilters) {
Set<String> eventIds = new HashSet<String>();
for (TimelineEvent event : entityEvents) {
eventIds.add(event.getId());
}
for (String eventFilter : eventFilters) {
if (!eventIds.contains(eventFilter)) {
return false;
}
}
return true;
}
/**
*
* @param metrics the set of metric objects in an entity
* @param metricFilters the set of metric Ids for filtering
* @return a boolean flag to indicate if both match
*/
public static boolean matchMetricFilters(Set<TimelineMetric> metrics,
Set<String> metricFilters) {
Set<String> metricIds = new HashSet<String>();
for (TimelineMetric metric : metrics) {
metricIds.add(metric.getId());
}
for (String metricFilter : metricFilters) {
if (!metricIds.contains(metricFilter)) {
return false;
}
}
return true;
  }

  /**
@ -530,4 +791,100 @@ public static String getAggregationCompactionDimension(List<Tag> tags) {
    }
    return appId;
  }
/**
* Helper method for reading relationship.
*
* @param <T> Describes the type of column prefix.
* @param entity entity to fill.
* @param result result from HBase.
* @param prefix column prefix.
* @param isRelatedTo if true, means relationship is to be added to
 *     isRelatedTo, otherwise it is added to relatesTo.
* @throws IOException if any problem is encountered while reading result.
*/
public static <T> void readRelationship(
TimelineEntity entity, Result result, ColumnPrefix<T> prefix,
boolean isRelatedTo) throws IOException {
// isRelatedTo and relatesTo are of type Map<String, Set<String>>
Map<String, Object> columns = prefix.readResults(result);
for (Map.Entry<String, Object> column : columns.entrySet()) {
for (String id : Separator.VALUES.splitEncoded(
column.getValue().toString())) {
if (isRelatedTo) {
entity.addIsRelatedToEntity(column.getKey(), id);
} else {
entity.addRelatesToEntity(column.getKey(), id);
}
}
}
}
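Each relation column is keyed by the related entity type and stores a separator-joined id list; the splitEncoded call above undoes that join. A decode sketch with '?' standing in for Separator.VALUES (an assumption; the real separator character and its escaping live in Separator):

import java.util.HashMap;
import java.util.Map;

public class RelationDecodeDemo {
  public static void main(String[] args) {
    Map<String, String> columns = new HashMap<>();
    columns.put("YARN_CONTAINER", "container_1?container_2"); // type -> ids
    for (Map.Entry<String, String> col : columns.entrySet()) {
      for (String id : col.getValue().split("\\?")) {
        System.out.println(col.getKey() + " relates to " + id);
      }
    }
  }
}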
/**
* Helper method for reading key-value pairs for either info or config.
*
* @param <T> Describes the type of column prefix.
* @param entity entity to fill.
* @param result result from HBase.
* @param prefix column prefix.
* @param isConfig if true, means we are reading configs, otherwise info.
* @throws IOException if any problem is encountered while reading result.
*/
public static <T> void readKeyValuePairs(
TimelineEntity entity, Result result, ColumnPrefix<T> prefix,
boolean isConfig) throws IOException {
// info and configuration are of type Map<String, Object or String>
Map<String, Object> columns = prefix.readResults(result);
if (isConfig) {
for (Map.Entry<String, Object> column : columns.entrySet()) {
entity.addConfig(column.getKey(), column.getValue().toString());
}
} else {
entity.addInfo(columns);
}
}
/**
* Read events from the entity table or the application table. The column name
* is of the form "eventId=timestamp=infoKey" where "infoKey" may be omitted
* if there is no info associated with the event.
*
* @param <T> Describes the type of column prefix.
* @param entity entity to fill.
* @param result HBase Result.
* @param prefix column prefix.
* @throws IOException if any problem is encountered while reading result.
*/
public static <T> void readEvents(TimelineEntity entity, Result result,
ColumnPrefix<T> prefix) throws IOException {
Map<String, TimelineEvent> eventsMap = new HashMap<>();
Map<?, Object> eventsResult =
prefix.readResultsHavingCompoundColumnQualifiers(result);
for (Map.Entry<?, Object> eventResult : eventsResult.entrySet()) {
byte[][] karr = (byte[][])eventResult.getKey();
// the column name is of the form "eventId=timestamp=infoKey"
if (karr.length == 3) {
String id = Bytes.toString(karr[0]);
long ts = TimelineStorageUtils.invertLong(Bytes.toLong(karr[1]));
String key = Separator.VALUES.joinEncoded(id, Long.toString(ts));
TimelineEvent event = eventsMap.get(key);
if (event == null) {
event = new TimelineEvent();
event.setId(id);
event.setTimestamp(ts);
eventsMap.put(key, event);
}
// handle empty info
String infoKey = karr[2].length == 0 ? null : Bytes.toString(karr[2]);
if (infoKey != null) {
event.addInfo(infoKey, eventResult.getValue());
}
} else {
LOG.warn("incorrectly formatted column name: it will be discarded");
continue;
}
}
Set<TimelineEvent> eventsSet = new HashSet<>(eventsMap.values());
entity.addEvents(eventsSet);
}
}
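The timestamp component of the event column is stored inverted so that newer events sort first under HBase's ascending key order, and the reader re-inverts it via invertLong above. A sketch of the round trip; invertLong here mirrors TimelineStorageUtils.invertLong as Long.MAX_VALUE minus the key, which is an assumption about its body:

public class EventColumnDemo {
  static long invertLong(long key) {
    return Long.MAX_VALUE - key;
  }

  public static void main(String[] args) {
    long ts = 1460000000000L;
    long stored = invertLong(ts);           // what the column name carries
    System.out.println(invertLong(stored)); // reader recovers the timestamp
  }
}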

View File

@ -24,8 +24,11 @@
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
/**
@ -46,7 +49,8 @@ public enum EntityColumn implements Column<EntityTable> {
  /**
   * When the entity was created.
   */
  CREATED_TIME(EntityColumnFamily.INFO, "created_time",
      LongConverter.getInstance()),

  /**
   * The version of the flow that this entity belongs to.
@ -60,12 +64,17 @@ public enum EntityColumn implements Column<EntityTable> {
  EntityColumn(ColumnFamily<EntityTable> columnFamily,
      String columnQualifier) {
    this(columnFamily, columnQualifier, GenericConverter.getInstance());
  }

  EntityColumn(ColumnFamily<EntityTable> columnFamily,
      String columnQualifier, ValueConverter converter) {
    this.columnFamily = columnFamily;
    this.columnQualifier = columnQualifier;
    // Future-proof by ensuring the right column prefix hygiene.
    this.columnQualifierBytes =
        Bytes.toBytes(Separator.SPACE.encode(columnQualifier));
    this.column = new ColumnHelper<EntityTable>(columnFamily, converter);
  }
  /**
@ -108,6 +117,21 @@ public static final EntityColumn columnFor(String columnQualifier) {
    return null;
  }
@Override
public byte[] getColumnQualifierBytes() {
return columnQualifierBytes.clone();
}
@Override
public byte[] getColumnFamilyBytes() {
return columnFamily.getBytes();
}
@Override
public ValueConverter getValueConverter() {
return column.getValueConverter();
}
  /**
   * Retrieve an {@link EntityColumn} given a name, or null if there is no
   * match. The following holds true: {@code columnFor(a,x) == columnFor(b,y)}

View File

@ -56,7 +56,7 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
  /**
   * Lifecycle events for an entity.
   */
  EVENT(EntityColumnFamily.INFO, "e", true),
  /**
   * Config column stores configuration with config key as the column name.
@ -78,6 +78,7 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
   */
  private final String columnPrefix;
  private final byte[] columnPrefixBytes;
  private final boolean compoundColQual;
  /**
   * Private constructor, meant to be used by the enum definition.
@ -87,7 +88,18 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
   */
  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix) {
    this(columnFamily, columnPrefix, false, GenericConverter.getInstance());
  }

  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix, boolean compondColQual) {
    this(columnFamily, columnPrefix, compondColQual,
        GenericConverter.getInstance());
  }

  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix, ValueConverter converter) {
    this(columnFamily, columnPrefix, false, converter);
  }
  /**
@ -99,7 +111,7 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
   * this column prefix.
   */
  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix, boolean compondColQual, ValueConverter converter) {
    column = new ColumnHelper<EntityTable>(columnFamily, converter);
    this.columnFamily = columnFamily;
    this.columnPrefix = columnPrefix;
@ -110,6 +122,7 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
      this.columnPrefixBytes =
          Bytes.toBytes(Separator.SPACE.encode(columnPrefix));
    }
    this.compoundColQual = compondColQual;
  }
  /**
@ -131,6 +144,24 @@ public byte[] getColumnPrefixBytes(String qualifierPrefix) {
        this.columnPrefixBytes, qualifierPrefix);
  }
@Override
public byte[] getColumnFamilyBytes() {
return columnFamily.getBytes();
}
@Override
public ValueConverter getValueConverter() {
return column.getValueConverter();
}
public byte[] getCompoundColQualBytes(String qualifier,
byte[]...components) {
if (!compoundColQual) {
return ColumnHelper.getColumnQualifier(null, qualifier);
}
return ColumnHelper.getCompoundColumnQualifierBytes(qualifier, components);
}
  /*
   * (non-Javadoc)
   *
@ -287,5 +318,4 @@ public static final EntityColumnPrefix columnFor(
    // Default to null
    return null;
  }
}

View File

@ -29,6 +29,7 @@
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter;
/**
 * Identifies partially qualified columns for the {@link FlowActivityTable}.
@ -50,6 +51,7 @@ public enum FlowActivityColumnPrefix
   */
  private final String columnPrefix;
  private final byte[] columnPrefixBytes;
  private final boolean compoundColQual;

  private final AggregationOperation aggOp;
@ -64,6 +66,12 @@ public enum FlowActivityColumnPrefix
  private FlowActivityColumnPrefix(
      ColumnFamily<FlowActivityTable> columnFamily, String columnPrefix,
      AggregationOperation aggOp) {
    this(columnFamily, columnPrefix, aggOp, false);
  }

  private FlowActivityColumnPrefix(
      ColumnFamily<FlowActivityTable> columnFamily, String columnPrefix,
      AggregationOperation aggOp, boolean compoundColQual) {
    column = new ColumnHelper<FlowActivityTable>(columnFamily);
    this.columnFamily = columnFamily;
    this.columnPrefix = columnPrefix;
@ -75,6 +83,7 @@ private FlowActivityColumnPrefix(
          .encode(columnPrefix));
    }
    this.aggOp = aggOp;
    this.compoundColQual = compoundColQual;
  }
  /**
@ -100,6 +109,16 @@ public byte[] getColumnPrefixBytes() {
    return columnPrefixBytes.clone();
  }
@Override
public byte[] getColumnFamilyBytes() {
return columnFamily.getBytes();
}
@Override
public ValueConverter getValueConverter() {
return column.getValueConverter();
}
  public AggregationOperation getAttribute() {
    return aggOp;
  }
@ -251,4 +270,20 @@ public void store(byte[] rowKey,
    column.store(rowKey, tableMutator, columnQualifier, null, inputValue,
        combinedAttributes);
  }
@Override
public byte[] getCompoundColQualBytes(String qualifier,
byte[]...components) {
if (!compoundColQual) {
return ColumnHelper.getColumnQualifier(null, qualifier);
}
return ColumnHelper.getCompoundColumnQualifierBytes(qualifier, components);
}
@Override
public Map<?, Object> readResultsHavingCompoundColumnQualifiers(Result result)
throws IOException {
// There are no compound column qualifiers for flow activity table.
return null;
}
}

View File

@ -86,10 +86,12 @@ private String getColumnQualifier() {
    return columnQualifier;
  }
@Override
  public byte[] getColumnQualifierBytes() {
    return columnQualifierBytes.clone();
  }
@Override
  public byte[] getColumnFamilyBytes() {
    return columnFamily.getBytes();
  }
@ -144,6 +146,7 @@ public static final FlowRunColumn columnFor(String columnQualifier) {
    return null;
  }
@Override
  public ValueConverter getValueConverter() {
    return column.getValueConverter();
  }

View File

@ -52,6 +52,7 @@ public enum FlowRunColumnPrefix implements ColumnPrefix<FlowRunTable> {
   */
  private final String columnPrefix;
  private final byte[] columnPrefixBytes;
  private final boolean compoundColQual;

  private final AggregationOperation aggOp;
@ -65,6 +66,12 @@ public enum FlowRunColumnPrefix implements ColumnPrefix<FlowRunTable> {
   */
  private FlowRunColumnPrefix(ColumnFamily<FlowRunTable> columnFamily,
      String columnPrefix, AggregationOperation fra, ValueConverter converter) {
    this(columnFamily, columnPrefix, fra, converter, false);
  }

  private FlowRunColumnPrefix(ColumnFamily<FlowRunTable> columnFamily,
      String columnPrefix, AggregationOperation fra, ValueConverter converter,
      boolean compoundColQual) {
    column = new ColumnHelper<FlowRunTable>(columnFamily, converter);
    this.columnFamily = columnFamily;
    this.columnPrefix = columnPrefix;
@ -76,6 +83,7 @@ private FlowRunColumnPrefix(ColumnFamily<FlowRunTable> columnFamily,
          .encode(columnPrefix));
    }
    this.aggOp = fra;
    this.compoundColQual = compoundColQual;
  }
  /**
@ -101,6 +109,7 @@ public byte[] getColumnPrefixBytes(String qualifierPrefix) {
        this.columnPrefixBytes, qualifierPrefix);
  }
@Override
  public byte[] getColumnFamilyBytes() {
    return columnFamily.getBytes();
  }
@ -222,6 +231,7 @@ public static final FlowRunColumnPrefix columnFor(String columnPrefix) {
    return null;
  }
@Override
  public ValueConverter getValueConverter() {
    return column.getValueConverter();
  }
@ -257,4 +267,20 @@ public static final FlowRunColumnPrefix columnFor(
    // Default to null
    return null;
  }
@Override
public byte[] getCompoundColQualBytes(String qualifier,
byte[]...components) {
if (!compoundColQual) {
return ColumnHelper.getColumnQualifier(null, qualifier);
}
return ColumnHelper.getCompoundColumnQualifierBytes(qualifier, components);
}
@Override
public Map<?, Object> readResultsHavingCompoundColumnQualifiers(Result result)
throws IOException {
// There are no compound column qualifiers for flow run table.
return null;
}
}

View File

@ -200,6 +200,7 @@ private boolean nextInternal(List<Cell> cells, int cellLimit)
    int addedCnt = 0;
    long currentTimestamp = System.currentTimeMillis();
    ValueConverter converter = null;
    while (cellLimit <= 0 || addedCnt < cellLimit) {
      cell = peekAtNextCell(cellLimit);
      if (cell == null) {

View File

@ -19,6 +19,7 @@
import java.io.IOException;
import java.util.EnumSet;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Connection;
@ -27,7 +28,6 @@
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.FilterList;
@ -39,6 +39,7 @@
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
@ -76,93 +77,231 @@ protected BaseTable<?> getTable() {
    return APPLICATION_TABLE;
  }

  /**
   * This method is called only for multiple entity reads.
   */
  @Override
  protected FilterList constructFilterListBasedOnFilters() throws IOException {
    // Filters here cannot be null for multiple entity reads as they are set in
    // augmentParams if null.
    TimelineEntityFilters filters = getFilters();
    FilterList listBasedOnFilters = new FilterList();
    // Create filter list based on created time range and add it to
    // listBasedOnFilters.
    long createdTimeBegin = filters.getCreatedTimeBegin();
    long createdTimeEnd = filters.getCreatedTimeEnd();
    if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
      listBasedOnFilters.addFilter(
          TimelineFilterUtils.createSingleColValueFiltersByRange(
              ApplicationColumn.CREATED_TIME, createdTimeBegin,
              createdTimeEnd));
    }
// Create filter list based on metric filters and add it to
// listBasedOnFilters.
TimelineFilterList metricFilters = filters.getMetricFilters();
if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createHBaseFilterList(
ApplicationColumnPrefix.METRIC, metricFilters));
}
// Create filter list based on config filters and add it to
// listBasedOnFilters.
TimelineFilterList configFilters = filters.getConfigFilters();
if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createHBaseFilterList(
ApplicationColumnPrefix.CONFIG, configFilters));
}
// Create filter list based on info filters and add it to listBasedOnFilters
TimelineFilterList infoFilters = filters.getInfoFilters();
if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createHBaseFilterList(
ApplicationColumnPrefix.INFO, infoFilters));
}
return listBasedOnFilters;
}
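The created-time branch above pushes the range check into the HBase scan itself. Below is a hedged sketch of the rough shape createSingleColValueFiltersByRange presumably builds: two SingleColumnValueFilters ANDed over the created_time column. The family and qualifier bytes are illustrative stand-ins, not the real schema constants.

import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class CreatedTimeRangeSketch {
  static FilterList createdTimeRange(long begin, long end) {
    byte[] family = Bytes.toBytes("i");              // assumed info family
    byte[] qualifier = Bytes.toBytes("created_time"); // assumed qualifier
    FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    list.addFilter(new SingleColumnValueFilter(
        family, qualifier, CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(begin)));
    list.addFilter(new SingleColumnValueFilter(
        family, qualifier, CompareOp.LESS_OR_EQUAL, Bytes.toBytes(end)));
    return list;
  }
}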
/**
* Add {@link QualifierFilter} filters to filter list for each column of
* application table.
*
* @param list filter list to which qualifier filters have to be added.
*/
@Override
protected void updateFixedColumns(FilterList list) {
for (ApplicationColumn column : ApplicationColumn.values()) {
list.addFilter(new QualifierFilter(CompareOp.EQUAL,
new BinaryComparator(column.getColumnQualifierBytes())));
}
}
/**
* Creates a filter list which indicates that only some of the column
* qualifiers in the info column family will be returned in result.
*
* @return filter list.
* @throws IOException if any problem occurs while creating filter list.
*/
private FilterList createFilterListForColsOfInfoFamily()
throws IOException {
FilterList infoFamilyColsFilter = new FilterList(Operator.MUST_PASS_ONE);
// Add filters for each column in entity table.
updateFixedColumns(infoFamilyColsFilter);
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
// If INFO field has to be retrieved, add a filter for fetching columns
// with INFO column prefix.
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.INFO)) {
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, ApplicationColumnPrefix.INFO));
}
TimelineFilterList relatesTo = getFilters().getRelatesTo();
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO)) {
// If RELATES_TO field has to be retrieved, add a filter for fetching
// columns with RELATES_TO column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, ApplicationColumnPrefix.RELATES_TO));
} else if (relatesTo != null && !relatesTo.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain RELATES_TO, we still
// need to have a filter to fetch some of the column qualifiers if
// relatesTo filters are specified. relatesTo filters will then be
// matched after fetching rows from HBase.
Set<String> relatesToCols =
TimelineFilterUtils.fetchColumnsFromFilterList(relatesTo);
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createFiltersFromColumnQualifiers(
ApplicationColumnPrefix.RELATES_TO, relatesToCols));
}
TimelineFilterList isRelatedTo = getFilters().getIsRelatedTo();
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
// If IS_RELATED_TO field has to be retrieved, add a filter for fetching
// columns with IS_RELATED_TO column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, ApplicationColumnPrefix.IS_RELATED_TO));
} else if (isRelatedTo != null && !isRelatedTo.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain IS_RELATED_TO, we still
// need to have a filter to fetch some of the column qualifiers if
// isRelatedTo filters are specified. isRelatedTo filters will then be
// matched after fetching rows from HBase.
Set<String> isRelatedToCols =
TimelineFilterUtils.fetchColumnsFromFilterList(isRelatedTo);
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createFiltersFromColumnQualifiers(
ApplicationColumnPrefix.IS_RELATED_TO, isRelatedToCols));
}
TimelineFilterList eventFilters = getFilters().getEventFilters();
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS)) {
// If EVENTS field has to be retrieved, add a filter for fetching columns
// with EVENT column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, ApplicationColumnPrefix.EVENT));
} else if (eventFilters != null && !eventFilters.getFilterList().isEmpty()){
// Even if fields to retrieve does not contain EVENTS, we still need to
// have a filter to fetch some of the column qualifiers on the basis of
// event filters specified. Event filters will then be matched after
// fetching rows from HBase.
Set<String> eventCols =
TimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createFiltersFromColumnQualifiers(
ApplicationColumnPrefix.EVENT, eventCols));
}
return infoFamilyColsFilter;
}
/**
 * Exclude column prefixes via filters which are not required (based on fields
* to retrieve) from info column family. These filters are added to filter
* list which contains a filter for getting info column family.
*
* @param infoColFamilyList filter list for info column family.
*/
private void excludeFieldsFromInfoColFamily(FilterList infoColFamilyList) {
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
// Events not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, ApplicationColumnPrefix.EVENT));
}
// info not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.INFO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, ApplicationColumnPrefix.INFO));
}
// is related to not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, ApplicationColumnPrefix.IS_RELATED_TO));
}
// relates to not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, ApplicationColumnPrefix.RELATES_TO));
}
}
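Judging by the pre-patch code further down in this file, each exclusion above most likely materializes as a NOT_EQUAL QualifierFilter over the prefix bytes. A sketch with an illustrative "e" (events) prefix:

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ExcludePrefixSketch {
  // Drops every column whose qualifier starts with the given prefix.
  static QualifierFilter dropPrefix(String prefix) {
    return new QualifierFilter(CompareOp.NOT_EQUAL,
        new BinaryPrefixComparator(Bytes.toBytes(prefix)));
  }

  public static void main(String[] args) {
    QualifierFilter dropEvents = dropPrefix("e"); // events not required
    System.out.println(dropEvents);
  }
}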
/**
* Updates filter list based on fields for confs and metrics to retrieve.
*
* @param listBasedOnFields filter list based on fields.
* @throws IOException if any problem occurs while updating filter list.
*/
private void updateFilterForConfsAndMetricsToRetrieve(
FilterList listBasedOnFields) throws IOException {
TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
// Please note that if confsToRetrieve is specified, we would have added
// CONFS to fields to retrieve in augmentParams() even if not specified.
if (dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS)) {
// Create a filter list for configs.
listBasedOnFields.addFilter(TimelineFilterUtils.
createFilterForConfsOrMetricsToRetrieve(
dataToRetrieve.getConfsToRetrieve(),
ApplicationColumnFamily.CONFIGS, ApplicationColumnPrefix.CONFIG));
}
// Please note that if metricsToRetrieve is specified, we would have added
// METRICS to fields to retrieve in augmentParams() even if not specified.
if (dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS)) {
// Create a filter list for metrics.
listBasedOnFields.addFilter(TimelineFilterUtils.
createFilterForConfsOrMetricsToRetrieve(
dataToRetrieve.getMetricsToRetrieve(),
ApplicationColumnFamily.METRICS, ApplicationColumnPrefix.METRIC));
}
}
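For the confs/metrics branches, the helper presumably anchors a FamilyFilter that selects the whole column family, optionally narrowed by the confs/metrics-to-retrieve list, much like the pre-patch code shown after the next method. A minimal sketch; the "c" family byte is an illustrative stand-in:

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilySelectSketch {
  // Pass everything in one column family through the scan.
  static FilterList wholeFamily(byte[] family) {
    return new FilterList(new FamilyFilter(
        CompareOp.EQUAL, new BinaryComparator(family)));
  }

  public static void main(String[] args) {
    FilterList configs = wholeFamily(Bytes.toBytes("c")); // assumed family
    System.out.println(configs);
  }
}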
  @Override
  protected FilterList constructFilterListBasedOnFields() throws IOException {
    if (!needCreateFilterListBasedOnFields()) {
      // Fetch all the columns. No need of a filter.
      return null;
    }
    FilterList listBasedOnFields = new FilterList(Operator.MUST_PASS_ONE);
    FilterList infoColFamilyList = new FilterList();
    // By default fetch everything in INFO column family.
    FamilyFilter infoColumnFamily =
        new FamilyFilter(CompareOp.EQUAL,
            new BinaryComparator(ApplicationColumnFamily.INFO.getBytes()));
    infoColFamilyList.addFilter(infoColumnFamily);
    if (!isSingleEntityRead() && fetchPartialColsFromInfoFamily()) {
      // We can fetch only some of the columns from info family.
      infoColFamilyList.addFilter(createFilterListForColsOfInfoFamily());
    } else {
      // Exclude column prefixes in info column family which are not required
      // based on fields to retrieve.
      excludeFieldsFromInfoColFamily(infoColFamilyList);
    }
    listBasedOnFields.addFilter(infoColFamilyList);
    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
    return listBasedOnFields;
  }

  protected FilterList constructFilterListBasedOnFields() {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
    // Fetch all the columns.
    if (dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
        (dataToRetrieve.getConfsToRetrieve() == null ||
        dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty()) &&
        (dataToRetrieve.getMetricsToRetrieve() == null ||
        dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty())) {
      return list;
    }
    FilterList infoColFamilyList = new FilterList();
    // By default fetch everything in INFO column family.
    FamilyFilter infoColumnFamily =
        new FamilyFilter(CompareOp.EQUAL,
            new BinaryComparator(ApplicationColumnFamily.INFO.getBytes()));
    infoColFamilyList.addFilter(infoColumnFamily);
    // Events not required.
    TimelineEntityFilters filters = getFilters();
    if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.EVENTS) &&
        !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
        (isSingleEntityRead() || filters.getEventFilters() == null)) {
      infoColFamilyList.addFilter(
          new QualifierFilter(CompareOp.NOT_EQUAL,
              new BinaryPrefixComparator(
                  ApplicationColumnPrefix.EVENT.getColumnPrefixBytes(""))));
    }
    // info not required.
    if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.INFO) &&
        !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
        (isSingleEntityRead() || filters.getInfoFilters() == null)) {
      infoColFamilyList.addFilter(
          new QualifierFilter(CompareOp.NOT_EQUAL,
              new BinaryPrefixComparator(
                  ApplicationColumnPrefix.INFO.getColumnPrefixBytes(""))));
    }
    // is related to not required.
    if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.IS_RELATED_TO) &&
        !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
        (isSingleEntityRead() || filters.getIsRelatedTo() == null)) {
      infoColFamilyList.addFilter(
          new QualifierFilter(CompareOp.NOT_EQUAL,
              new BinaryPrefixComparator(
                  ApplicationColumnPrefix.IS_RELATED_TO
                      .getColumnPrefixBytes(""))));
    }
    // relates to not required.
    if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.RELATES_TO) &&
        !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
        (isSingleEntityRead() || filters.getRelatesTo() == null)) {
      infoColFamilyList.addFilter(
          new QualifierFilter(CompareOp.NOT_EQUAL,
              new BinaryPrefixComparator(
                  ApplicationColumnPrefix.RELATES_TO
                      .getColumnPrefixBytes(""))));
    }
    list.addFilter(infoColFamilyList);
    if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS) ||
        (!isSingleEntityRead() && filters.getConfigFilters() != null)) ||
        (dataToRetrieve.getConfsToRetrieve() != null &&
        !dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty())) {
      FilterList filterCfg =
          new FilterList(new FamilyFilter(CompareOp.EQUAL,
              new BinaryComparator(
                  ApplicationColumnFamily.CONFIGS.getBytes())));
      if (dataToRetrieve.getConfsToRetrieve() != null &&
          !dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty()) {
        filterCfg.addFilter(TimelineFilterUtils.createHBaseFilterList(
            ApplicationColumnPrefix.CONFIG,
            dataToRetrieve.getConfsToRetrieve()));
      }
      list.addFilter(filterCfg);
    }
    if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS) ||
        (!isSingleEntityRead() && filters.getMetricFilters() != null)) ||
        (dataToRetrieve.getMetricsToRetrieve() != null &&
        !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty())) {
      FilterList filterMetrics =
          new FilterList(new FamilyFilter(CompareOp.EQUAL,
              new BinaryComparator(
                  ApplicationColumnFamily.METRICS.getBytes())));
      if (dataToRetrieve.getMetricsToRetrieve() != null &&
          !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty()) {
        filterMetrics.addFilter(TimelineFilterUtils.createHBaseFilterList(
            ApplicationColumnPrefix.METRIC,
            dataToRetrieve.getMetricsToRetrieve()));
      }
      list.addFilter(filterMetrics);
    }
    return list;
  }
  @Override
@ -182,6 +321,9 @@ protected Result getResult(Configuration hbaseConf, Connection conn,
  @Override
  protected void validateParams() {
    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
    Preconditions.checkNotNull(
        getDataToRetrieve(), "data to retrieve shouldn't be null");
    Preconditions.checkNotNull(getContext().getClusterId(),
        "clusterId shouldn't be null");
    Preconditions.checkNotNull(getContext().getEntityType(),
@ -202,6 +344,7 @@ protected void augmentParams(Configuration hbaseConf, Connection conn)
      throws IOException {
    TimelineReaderContext context = getContext();
    if (isSingleEntityRead()) {
      // Get flow context information from AppToFlow table.
      if (context.getFlowName() == null || context.getFlowRunId() == null ||
          context.getUserId() == null) {
        FlowContext flowContext = lookupFlowContext(
@ -211,7 +354,12 @@ protected void augmentParams(Configuration hbaseConf, Connection conn)
        context.setUserId(flowContext.getUserId());
      }
    }
    // Add configs/metrics to fields to retrieve if confsToRetrieve and/or
    // metricsToRetrieve are specified.
    getDataToRetrieve().addFieldsBasedOnConfsAndMetricsToRetrieve();
    if (!isSingleEntityRead()) {
      createFiltersIfNull();
    }
  }
  @Override
@ -252,114 +400,84 @@ protected TimelineEntity parseEntity(Result result) throws IOException {
    Number createdTime =
        (Number)ApplicationColumn.CREATED_TIME.readResult(result);
    entity.setCreatedTime(createdTime.longValue());
if (!isSingleEntityRead() &&
(entity.getCreatedTime() < filters.getCreatedTimeBegin() ||
entity.getCreatedTime() > filters.getCreatedTimeEnd())) {
return null;
}
    EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();

    // fetch is related to entities
    boolean checkIsRelatedTo =
        filters != null && filters.getIsRelatedTo() != null &&
        filters.getIsRelatedTo().size() > 0;
    if (fieldsToRetrieve.contains(Field.ALL) ||
        fieldsToRetrieve.contains(Field.IS_RELATED_TO) || checkIsRelatedTo) {
      readRelationship(entity, result, ApplicationColumnPrefix.IS_RELATED_TO,
          true);
      if (checkIsRelatedTo && !TimelineStorageUtils.matchRelations(
          entity.getIsRelatedToEntities(), filters.getIsRelatedTo())) {
        return null;
      }
      if (!fieldsToRetrieve.contains(Field.ALL) &&
          !fieldsToRetrieve.contains(Field.IS_RELATED_TO)) {
        entity.getIsRelatedToEntities().clear();
      }
    }

    // fetch is related to entities and match isRelatedTo filter. If
    // isRelatedTo filters do not match, entity would be dropped. We have to
    // match filters locally as relevant HBase filters to filter out rows on
    // the basis of isRelatedTo are not set in HBase scan.
    boolean checkIsRelatedTo =
        !isSingleEntityRead() && filters.getIsRelatedTo() != null &&
        filters.getIsRelatedTo().getFilterList().size() > 0;
    if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.IS_RELATED_TO) ||
        checkIsRelatedTo) {
      TimelineStorageUtils.readRelationship(
          entity, result, ApplicationColumnPrefix.IS_RELATED_TO, true);
      if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity,
          filters.getIsRelatedTo())) {
        return null;
      }
      if (!TimelineStorageUtils.hasField(fieldsToRetrieve,
          Field.IS_RELATED_TO)) {
        entity.getIsRelatedToEntities().clear();
      }
    }

    // fetch relates to entities
    boolean checkRelatesTo =
        filters != null && filters.getRelatesTo() != null &&
        filters.getRelatesTo().size() > 0;
    if (fieldsToRetrieve.contains(Field.ALL) ||
        fieldsToRetrieve.contains(Field.RELATES_TO) || checkRelatesTo) {
      readRelationship(entity, result, ApplicationColumnPrefix.RELATES_TO,
          false);
      if (checkRelatesTo && !TimelineStorageUtils.matchRelations(
          entity.getRelatesToEntities(), filters.getRelatesTo())) {
        return null;
      }
      if (!fieldsToRetrieve.contains(Field.ALL) &&
          !fieldsToRetrieve.contains(Field.RELATES_TO)) {
        entity.getRelatesToEntities().clear();
      }
    }

    // fetch relates to entities and match relatesTo filter. If relatesTo
    // filters do not match, entity would be dropped. We have to match filters
    // locally as relevant HBase filters to filter out rows on the basis of
    // relatesTo are not set in HBase scan.
    boolean checkRelatesTo =
        !isSingleEntityRead() && filters.getRelatesTo() != null &&
        filters.getRelatesTo().getFilterList().size() > 0;
    if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO) ||
        checkRelatesTo) {
      TimelineStorageUtils.readRelationship(
          entity, result, ApplicationColumnPrefix.RELATES_TO, false);
      if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity,
          filters.getRelatesTo())) {
        return null;
      }
      if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO)) {
        entity.getRelatesToEntities().clear();
      }
    }

    // fetch info if fieldsToRetrieve contains INFO or ALL.
    if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.INFO)) {
      TimelineStorageUtils.readKeyValuePairs(
          entity, result, ApplicationColumnPrefix.INFO, false);
    }

    // fetch info
    boolean checkInfo = filters != null && filters.getInfoFilters() != null &&
        filters.getInfoFilters().size() > 0;
    if (fieldsToRetrieve.contains(Field.ALL) ||
        fieldsToRetrieve.contains(Field.INFO) || checkInfo) {
      readKeyValuePairs(entity, result, ApplicationColumnPrefix.INFO, false);
      if (checkInfo &&
          !TimelineStorageUtils.matchFilters(
              entity.getInfo(), filters.getInfoFilters())) {
        return null;
      }
      if (!fieldsToRetrieve.contains(Field.ALL) &&
          !fieldsToRetrieve.contains(Field.INFO)) {
entity.getInfo().clear();
}
} }
// fetch configs // fetch configs if fieldsToRetrieve contains CONFIGS or ALL.
boolean checkConfigs = if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.CONFIGS)) {
filters != null && filters.getConfigFilters() != null && TimelineStorageUtils.readKeyValuePairs(
filters.getConfigFilters().size() > 0; entity, result, ApplicationColumnPrefix.CONFIG, true);
if (fieldsToRetrieve.contains(Field.ALL) ||
fieldsToRetrieve.contains(Field.CONFIGS) || checkConfigs) {
readKeyValuePairs(entity, result, ApplicationColumnPrefix.CONFIG, true);
if (checkConfigs && !TimelineStorageUtils.matchFilters(
entity.getConfigs(), filters.getConfigFilters())) {
return null;
}
if (!fieldsToRetrieve.contains(Field.ALL) &&
!fieldsToRetrieve.contains(Field.CONFIGS)) {
entity.getConfigs().clear();
}
} }
// fetch events // fetch events and match event filters if they exist. If event filters do
// not match, entity would be dropped. We have to match filters locally
// as relevant HBase filters to filter out rows on the basis of events
// are not set in HBase scan.
boolean checkEvents = boolean checkEvents =
filters != null && filters.getEventFilters() != null && !isSingleEntityRead() && filters.getEventFilters() != null &&
filters.getEventFilters().size() > 0; filters.getEventFilters().getFilterList().size() > 0;
if (fieldsToRetrieve.contains(Field.ALL) || if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS) ||
fieldsToRetrieve.contains(Field.EVENTS) || checkEvents) { checkEvents) {
readEvents(entity, result, true); TimelineStorageUtils.readEvents(
if (checkEvents && !TimelineStorageUtils.matchEventFilters( entity, result, ApplicationColumnPrefix.EVENT);
entity.getEvents(), filters.getEventFilters())) { if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity,
filters.getEventFilters())) {
return null; return null;
} }
if (!fieldsToRetrieve.contains(Field.ALL) && if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS)) {
!fieldsToRetrieve.contains(Field.EVENTS)) {
entity.getEvents().clear(); entity.getEvents().clear();
} }
} }
// fetch metrics // fetch metrics if fieldsToRetrieve contains METRICS or ALL.
boolean checkMetrics = if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.METRICS)) {
filters != null && filters.getMetricFilters() != null &&
filters.getMetricFilters().size() > 0;
if (fieldsToRetrieve.contains(Field.ALL) ||
fieldsToRetrieve.contains(Field.METRICS) || checkMetrics) {
readMetrics(entity, result, ApplicationColumnPrefix.METRIC); readMetrics(entity, result, ApplicationColumnPrefix.METRIC);
if (checkMetrics && !TimelineStorageUtils.matchMetricFilters(
entity.getMetrics(), filters.getMetricFilters())) {
return null;
}
if (!fieldsToRetrieve.contains(Field.ALL) &&
!fieldsToRetrieve.contains(Field.METRICS)) {
entity.getMetrics().clear();
}
} }
return entity; return entity;
} }
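The repeated TimelineStorageUtils.hasField calls introduced above replace the old contains(Field.ALL) || contains(Field.X) pairs. A small sketch of what such a helper amounts to, with a stand-in enum instead of TimelineReader.Field:

import java.util.EnumSet;

public final class FieldCheckSketch {
  // Stand-in for TimelineReader.Field; only the members used here.
  enum Field { ALL, EVENTS, METRICS }

  // True if the caller asked for this specific field or for ALL fields.
  // This mirrors the contains(ALL) || contains(field) checks that the patch
  // folds into TimelineStorageUtils.hasField.
  static boolean hasField(EnumSet<Field> fieldsToRetrieve, Field field) {
    return fieldsToRetrieve.contains(Field.ALL) ||
        fieldsToRetrieve.contains(field);
  }

  public static void main(String[] args) {
    System.out.println(hasField(EnumSet.of(Field.ALL), Field.METRICS));    // true
    System.out.println(hasField(EnumSet.of(Field.EVENTS), Field.METRICS)); // false
  }
}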

@@ -15,6 +15,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.timelineservice.storage.reader;

import java.io.IOException;
@@ -75,6 +76,12 @@ protected void validateParams() {
  @Override
  protected void augmentParams(Configuration hbaseConf, Connection conn)
      throws IOException {
+   createFiltersIfNull();
+ }
+
+ @Override
+ protected FilterList constructFilterListBasedOnFilters() throws IOException {
+   return null;
  }

  @Override

@@ -38,9 +38,11 @@
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumn;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnFamily;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnPrefix;
@@ -76,6 +78,9 @@ protected BaseTable<?> getTable() {
  @Override
  protected void validateParams() {
+   Preconditions.checkNotNull(getContext(), "context shouldn't be null");
+   Preconditions.checkNotNull(
+       getDataToRetrieve(), "data to retrieve shouldn't be null");
    Preconditions.checkNotNull(getContext().getClusterId(),
        "clusterId shouldn't be null");
    Preconditions.checkNotNull(getContext().getUserId(),
@@ -90,37 +95,87 @@ protected void validateParams() {
  @Override
  protected void augmentParams(Configuration hbaseConf, Connection conn) {
+   // Add metrics to fields to retrieve if metricsToRetrieve is specified.
    getDataToRetrieve().addFieldsBasedOnConfsAndMetricsToRetrieve();
+   if (!isSingleEntityRead()) {
+     createFiltersIfNull();
+   }
+ }

protected FilterList constructFilterListBasedOnFilters() throws IOException {
FilterList listBasedOnFilters = new FilterList();
// Filter based on created time range.
Long createdTimeBegin = getFilters().getCreatedTimeBegin();
Long createdTimeEnd = getFilters().getCreatedTimeEnd();
if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createSingleColValueFiltersByRange(
FlowRunColumn.MIN_START_TIME, createdTimeBegin, createdTimeEnd));
}
// Filter based on metric filters.
TimelineFilterList metricFilters = getFilters().getMetricFilters();
if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createHBaseFilterList(
FlowRunColumnPrefix.METRIC, metricFilters));
}
return listBasedOnFilters;
}
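For orientation, a created-time range like the one above is usually pushed down to HBase as a pair of single-column-value filters that must both pass. A hedged sketch of what createSingleColValueFiltersByRange plausibly assembles; the family and qualifier names are hypothetical stand-ins for FlowRunColumn.MIN_START_TIME:

import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public final class TimeRangeFilterSketch {
  // Expresses begin <= value <= end as two SingleColumnValueFilters under
  // MUST_PASS_ALL. "i" and "min_start_time" are made-up family/qualifier
  // names used only for this illustration.
  public static FilterList createdTimeRange(long begin, long end) {
    FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    list.addFilter(new SingleColumnValueFilter(Bytes.toBytes("i"),
        Bytes.toBytes("min_start_time"), CompareOp.GREATER_OR_EQUAL,
        Bytes.toBytes(begin)));
    list.addFilter(new SingleColumnValueFilter(Bytes.toBytes("i"),
        Bytes.toBytes("min_start_time"), CompareOp.LESS_OR_EQUAL,
        Bytes.toBytes(end)));
    return list;
  }
}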
/**
   * Adds a {@link QualifierFilter} to a filter list for each column of the
   * flow run table.
*
* @return filter list to which qualifier filters have been added.
*/
private FilterList updateFixedColumns() {
FilterList columnsList = new FilterList(Operator.MUST_PASS_ONE);
for (FlowRunColumn column : FlowRunColumn.values()) {
columnsList.addFilter(new QualifierFilter(CompareOp.EQUAL,
new BinaryComparator(column.getColumnQualifierBytes())));
}
return columnsList;
  }
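updateFixedColumns above ORs together one QualifierFilter per fixed column, effectively a qualifier whitelist. The same pattern in isolation, with made-up qualifier names in place of FlowRunColumn.values():

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public final class FixedColumnWhitelistSketch {
  // MUST_PASS_ONE makes the list a union: a cell survives if its qualifier
  // equals any of the listed names. Qualifier names here are hypothetical.
  public static FilterList whitelist(String... qualifiers) {
    FilterList columns = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    for (String q : qualifiers) {
      columns.addFilter(new QualifierFilter(CompareOp.EQUAL,
          new BinaryComparator(Bytes.toBytes(q))));
    }
    return columns;
  }
}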
  @Override
- protected FilterList constructFilterListBasedOnFields() {
+ protected FilterList constructFilterListBasedOnFields() throws IOException {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    // By default fetch everything in INFO column family.
    FamilyFilter infoColumnFamily =
        new FamilyFilter(CompareOp.EQUAL,
            new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
    TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
-   // Metrics not required.
-   if (!isSingleEntityRead() &&
-       !dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS) &&
-       !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL)) {
+   // If multiple entities have to be retrieved, check if metrics have to be
+   // retrieved and if not, add a filter so that metrics can be excluded.
+   // Metrics are always returned if we are reading a single entity.
+   if (!isSingleEntityRead() && !TimelineStorageUtils.hasField(
+       dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
      FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
      infoColFamilyList.addFilter(infoColumnFamily);
      infoColFamilyList.addFilter(
          new QualifierFilter(CompareOp.NOT_EQUAL,
              new BinaryPrefixComparator(
                  FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""))));
      list.addFilter(infoColFamilyList);
-   }
-   if (dataToRetrieve.getMetricsToRetrieve() != null &&
-       !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty()) {
-     FilterList infoColFamilyList = new FilterList();
-     infoColFamilyList.addFilter(infoColumnFamily);
-     infoColFamilyList.addFilter(TimelineFilterUtils.createHBaseFilterList(
-         FlowRunColumnPrefix.METRIC, dataToRetrieve.getMetricsToRetrieve()));
-     list.addFilter(infoColFamilyList);
-   }
+   } else {
+     // Check if metricsToRetrieve are specified and if they are, create a
+     // filter list for info column family by adding the flow run table's
+     // columns and a list for metrics to retrieve. Please note that
+     // fieldsToRetrieve will have METRICS added to it if metricsToRetrieve
+     // are specified (in augmentParams()).
+     TimelineFilterList metricsToRetrieve =
+         dataToRetrieve.getMetricsToRetrieve();
+     if (metricsToRetrieve != null &&
+         !metricsToRetrieve.getFilterList().isEmpty()) {
+       FilterList infoColFamilyList = new FilterList();
+       infoColFamilyList.addFilter(infoColumnFamily);
+       FilterList columnsList = updateFixedColumns();
+       columnsList.addFilter(
+           TimelineFilterUtils.createHBaseFilterList(
+               FlowRunColumnPrefix.METRIC, metricsToRetrieve));
+       infoColFamilyList.addFilter(columnsList);
+       list.addFilter(infoColFamilyList);
+     }
+   }
    return list;
  }
@@ -175,11 +230,6 @@ protected TimelineEntity parseEntity(Result result) throws IOException {
    if (startTime != null) {
      flowRun.setStartTime(startTime.longValue());
    }
-   if (!isSingleEntityRead() &&
-       (flowRun.getStartTime() < getFilters().getCreatedTimeBegin() ||
-       flowRun.getStartTime() > getFilters().getCreatedTimeEnd())) {
-     return null;
-   }
    // read the end time if available
    Long endTime = (Long)FlowRunColumn.MAX_END_TIME.readResult(result);
@@ -193,9 +243,10 @@ protected TimelineEntity parseEntity(Result result) throws IOException {
      flowRun.setVersion(version);
    }
-   // read metrics
-   if (isSingleEntityRead() ||
-       getDataToRetrieve().getFieldsToRetrieve().contains(Field.METRICS)) {
+   // read metrics if it's a single entity query or if METRICS are part of
+   // fieldsToRetrieve.
+   if (isSingleEntityRead() || TimelineStorageUtils.hasField(
+       getDataToRetrieve().getFieldsToRetrieve(), Field.METRICS)) {
      readMetrics(flowRun, result, FlowRunColumnPrefix.METRIC);
    }

@@ -19,13 +19,8 @@
import java.io.IOException;
import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
import java.util.Set;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
@@ -33,28 +28,22 @@
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowColumn;
import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumn;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnFamily;
@@ -71,7 +60,6 @@
 */
class GenericEntityReader extends TimelineEntityReader {
  private static final EntityTable ENTITY_TABLE = new EntityTable();
- private static final Log LOG = LogFactory.getLog(GenericEntityReader.class);

  /**
   * Used to look up the flow context.
@@ -97,92 +85,322 @@ protected BaseTable<?> getTable() {
  }

  @Override
- protected FilterList constructFilterListBasedOnFields() {
-   FilterList list = new FilterList(Operator.MUST_PASS_ONE);
-   TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
-   // Fetch all the columns.
-   if (dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-       (dataToRetrieve.getConfsToRetrieve() == null ||
-       dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty()) &&
-       (dataToRetrieve.getMetricsToRetrieve() == null ||
-       dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty())) {
-     return list;
-   }
+ protected FilterList constructFilterListBasedOnFilters() throws IOException {
+   // Filters here cannot be null for multiple entity reads as they are set in
+   // augmentParams if null.
+   FilterList listBasedOnFilters = new FilterList();
+   TimelineEntityFilters filters = getFilters();
+   // Create filter list based on created time range and add it to
+   // listBasedOnFilters.
+   long createdTimeBegin = filters.getCreatedTimeBegin();
+   long createdTimeEnd = filters.getCreatedTimeEnd();
+   if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
+     listBasedOnFilters.addFilter(
+         TimelineFilterUtils.createSingleColValueFiltersByRange(
+             EntityColumn.CREATED_TIME, createdTimeBegin, createdTimeEnd));
+   }
// Create filter list based on metric filters and add it to
// listBasedOnFilters.
TimelineFilterList metricFilters = filters.getMetricFilters();
if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createHBaseFilterList(
EntityColumnPrefix.METRIC, metricFilters));
}
// Create filter list based on config filters and add it to
// listBasedOnFilters.
TimelineFilterList configFilters = filters.getConfigFilters();
if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createHBaseFilterList(
EntityColumnPrefix.CONFIG, configFilters));
}
// Create filter list based on info filters and add it to listBasedOnFilters
TimelineFilterList infoFilters = filters.getInfoFilters();
if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(
TimelineFilterUtils.createHBaseFilterList(
EntityColumnPrefix.INFO, infoFilters));
}
return listBasedOnFilters;
}
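Each createHBaseFilterList call above translates a TimelineFilterList coming from the REST layer into HBase filters. To illustrate the input side, a hedged sketch of building a nested metric filter tree with the new filter classes; the TimelineCompareFilter argument order (op, key, value) and the TimelineCompareOp member names are assumptions inferred from this patch, not verified signatures:

import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;

public final class MetricFilterSketch {
  // (metric1 >= 100 AND metric2 < 50) OR metric3 == 1. Filter lists nest,
  // which is what makes the new "complex filters" possible. Metric names
  // and values are made up for the example.
  public static TimelineFilterList sample() {
    TimelineFilterList inner = new TimelineFilterList(Operator.AND,
        new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL,
            "metric1", 100L),
        new TimelineCompareFilter(TimelineCompareOp.LESS_THAN,
            "metric2", 50L));
    return new TimelineFilterList(Operator.OR, inner,
        new TimelineCompareFilter(TimelineCompareOp.EQUAL, "metric3", 1L));
  }
}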
/**
* Check if we need to fetch only some of the event columns.
*
* @return true if we need to fetch some of the columns, false otherwise.
*/
private static boolean fetchPartialEventCols(TimelineFilterList eventFilters,
EnumSet<Field> fieldsToRetrieve) {
return (eventFilters != null && !eventFilters.getFilterList().isEmpty() &&
!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS));
}
/**
* Check if we need to fetch only some of the relates_to columns.
*
* @return true if we need to fetch some of the columns, false otherwise.
*/
private static boolean fetchPartialRelatesToCols(TimelineFilterList relatesTo,
EnumSet<Field> fieldsToRetrieve) {
return (relatesTo != null && !relatesTo.getFilterList().isEmpty() &&
!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO));
}
/**
* Check if we need to fetch only some of the is_related_to columns.
*
* @return true if we need to fetch some of the columns, false otherwise.
*/
private static boolean fetchPartialIsRelatedToCols(
TimelineFilterList isRelatedTo, EnumSet<Field> fieldsToRetrieve) {
return (isRelatedTo != null && !isRelatedTo.getFilterList().isEmpty() &&
!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.IS_RELATED_TO));
}
/**
   * Check if we need to fetch only some of the columns from the info column
   * family, based on event filters, relatesTo and isRelatedTo.
*
* @return true, if we need to fetch only some of the columns, false if we
* need to fetch all the columns under info column family.
*/
protected boolean fetchPartialColsFromInfoFamily() {
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
TimelineEntityFilters filters = getFilters();
return fetchPartialEventCols(filters.getEventFilters(), fieldsToRetrieve) ||
fetchPartialRelatesToCols(filters.getRelatesTo(), fieldsToRetrieve) ||
fetchPartialIsRelatedToCols(filters.getIsRelatedTo(), fieldsToRetrieve);
}
/**
   * Check if we need to create a filter list based on fields. We need to
   * create a filter list if not all fields are to be retrieved, or if specific
   * configs or metrics have been requested for the query. We also need to
   * create one if relationship (relatesTo/isRelatedTo) or event filters are
   * specified for the query.
*
* @return true if we need to create the filter list, false otherwise.
*/
protected boolean needCreateFilterListBasedOnFields() {
TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
// Check if all fields are to be retrieved or not. If all fields have to
// be retrieved, also check if we have some metrics or configs to
// retrieve specified for the query because then a filter list will have
// to be created.
boolean flag = !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) ||
(dataToRetrieve.getConfsToRetrieve() != null &&
!dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty()) ||
(dataToRetrieve.getMetricsToRetrieve() != null &&
!dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty());
// Filters need to be checked only if we are reading multiple entities. If
// condition above is false, we check if there are relationships(relatesTo/
// isRelatedTo) and event filters specified for the query.
if (!flag && !isSingleEntityRead()) {
TimelineEntityFilters filters = getFilters();
flag = (filters.getEventFilters() != null &&
!filters.getEventFilters().getFilterList().isEmpty()) ||
(filters.getIsRelatedTo() != null &&
!filters.getIsRelatedTo().getFilterList().isEmpty()) ||
(filters.getRelatesTo() != null &&
!filters.getRelatesTo().getFilterList().isEmpty());
}
return flag;
}
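The decision logic above reduces to a short-circuited predicate over the query. A condensed sketch with plain booleans, just to make the structure explicit (parameter names are mine, not from the patch):

public final class NeedFilterListSketch {
  // Mirrors needCreateFilterListBasedOnFields(): a fields-based FilterList
  // is needed when not all fields are fetched, when specific confs/metrics
  // are requested, or, for multi-entity reads only, when event/relatesTo/
  // isRelatedTo filters must later be matched against fetched columns.
  static boolean needFilterList(boolean fetchesAllFields,
      boolean hasConfsToRetrieve, boolean hasMetricsToRetrieve,
      boolean singleEntityRead, boolean hasEventOrRelationFilters) {
    boolean flag = !fetchesAllFields || hasConfsToRetrieve
        || hasMetricsToRetrieve;
    if (!flag && !singleEntityRead) {
      flag = hasEventOrRelationFilters;
    }
    return flag;
  }
}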
/**
* Add {@link QualifierFilter} filters to filter list for each column of
* entity table.
*
* @param list filter list to which qualifier filters have to be added.
*/
protected void updateFixedColumns(FilterList list) {
for (EntityColumn column : EntityColumn.values()) {
list.addFilter(new QualifierFilter(CompareOp.EQUAL,
new BinaryComparator(column.getColumnQualifierBytes())));
}
}
/**
* Creates a filter list which indicates that only some of the column
* qualifiers in the info column family will be returned in result.
*
* @return filter list.
* @throws IOException if any problem occurs while creating filter list.
*/
private FilterList createFilterListForColsOfInfoFamily()
throws IOException {
FilterList infoFamilyColsFilter = new FilterList(Operator.MUST_PASS_ONE);
// Add filters for each column in entity table.
updateFixedColumns(infoFamilyColsFilter);
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
// If INFO field has to be retrieved, add a filter for fetching columns
// with INFO column prefix.
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.INFO)) {
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, EntityColumnPrefix.INFO));
}
TimelineFilterList relatesTo = getFilters().getRelatesTo();
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO)) {
// If RELATES_TO field has to be retrieved, add a filter for fetching
// columns with RELATES_TO column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, EntityColumnPrefix.RELATES_TO));
} else if (relatesTo != null && !relatesTo.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain RELATES_TO, we still
// need to have a filter to fetch some of the column qualifiers if
// relatesTo filters are specified. relatesTo filters will then be
// matched after fetching rows from HBase.
Set<String> relatesToCols =
TimelineFilterUtils.fetchColumnsFromFilterList(relatesTo);
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createFiltersFromColumnQualifiers(
EntityColumnPrefix.RELATES_TO, relatesToCols));
}
TimelineFilterList isRelatedTo = getFilters().getIsRelatedTo();
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
// If IS_RELATED_TO field has to be retrieved, add a filter for fetching
// columns with IS_RELATED_TO column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, EntityColumnPrefix.IS_RELATED_TO));
} else if (isRelatedTo != null && !isRelatedTo.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain IS_RELATED_TO, we still
// need to have a filter to fetch some of the column qualifiers if
// isRelatedTo filters are specified. isRelatedTo filters will then be
// matched after fetching rows from HBase.
Set<String> isRelatedToCols =
TimelineFilterUtils.fetchColumnsFromFilterList(isRelatedTo);
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createFiltersFromColumnQualifiers(
EntityColumnPrefix.IS_RELATED_TO, isRelatedToCols));
}
TimelineFilterList eventFilters = getFilters().getEventFilters();
if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS)) {
// If EVENTS field has to be retrieved, add a filter for fetching columns
// with EVENT column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.EQUAL, EntityColumnPrefix.EVENT));
    } else if (eventFilters != null && !eventFilters.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain EVENTS, we still need to
// have a filter to fetch some of the column qualifiers on the basis of
// event filters specified. Event filters will then be matched after
// fetching rows from HBase.
Set<String> eventCols =
TimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createFiltersFromColumnQualifiers(
EntityColumnPrefix.EVENT, eventCols));
}
return infoFamilyColsFilter;
}
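When a field is not requested but its filters are, the method above narrows the fetch to only the qualifiers the filters mention; the filters are then evaluated locally on the fetched columns. A toy sketch of the column-extraction idea behind TimelineFilterUtils.fetchColumnsFromFilterList, using a plain map as a stand-in for a TimelineFilterList:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public final class FilterColumnsSketch {
  // Given filter key -> expected value pairs (a stand-in for a
  // TimelineFilterList), collect the distinct keys so the scan can fetch
  // only those qualifiers and still evaluate the filters afterwards.
  static Set<String> columnsToFetch(Map<String, Object> filters) {
    return new HashSet<String>(filters.keySet());
  }

  public static void main(String[] args) {
    Map<String, Object> eventFilters = new HashMap<String, Object>();
    eventFilters.put("event_1", Boolean.TRUE);   // event must exist
    eventFilters.put("event_2", Boolean.FALSE);  // event must not exist
    System.out.println(columnsToFetch(eventFilters));
  }
}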
/**
   * Excludes column prefixes which are not required (based on fields to
   * retrieve) from the info column family, by adding NOT_EQUAL qualifier
   * filters to the filter list that selects the info column family.
*
* @param infoColFamilyList filter list for info column family.
*/
private void excludeFieldsFromInfoColFamily(FilterList infoColFamilyList) {
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
// Events not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, EntityColumnPrefix.EVENT));
}
// info not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.INFO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, EntityColumnPrefix.INFO));
}
// is related to not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, EntityColumnPrefix.IS_RELATED_TO));
}
// relates to not required.
if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(
CompareOp.NOT_EQUAL, EntityColumnPrefix.RELATES_TO));
}
}
/**
* Updates filter list based on fields for confs and metrics to retrieve.
*
* @param listBasedOnFields filter list based on fields.
* @throws IOException if any problem occurs while updating filter list.
*/
private void updateFilterForConfsAndMetricsToRetrieve(
FilterList listBasedOnFields) throws IOException {
TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
// Please note that if confsToRetrieve is specified, we would have added
// CONFS to fields to retrieve in augmentParams() even if not specified.
if (dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS)) {
// Create a filter list for configs.
listBasedOnFields.addFilter(TimelineFilterUtils.
createFilterForConfsOrMetricsToRetrieve(
dataToRetrieve.getConfsToRetrieve(),
EntityColumnFamily.CONFIGS, EntityColumnPrefix.CONFIG));
}
// Please note that if metricsToRetrieve is specified, we would have added
// METRICS to fields to retrieve in augmentParams() even if not specified.
if (dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS)) {
// Create a filter list for metrics.
listBasedOnFields.addFilter(TimelineFilterUtils.
createFilterForConfsOrMetricsToRetrieve(
dataToRetrieve.getMetricsToRetrieve(),
EntityColumnFamily.METRICS, EntityColumnPrefix.METRIC));
}
}
@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
if (!needCreateFilterListBasedOnFields()) {
      // Fetch all the columns. No filter needed.
return null;
}
FilterList listBasedOnFields = new FilterList(Operator.MUST_PASS_ONE);
    FilterList infoColFamilyList = new FilterList();
    // By default fetch everything in INFO column family.
    FamilyFilter infoColumnFamily =
        new FamilyFilter(CompareOp.EQUAL,
            new BinaryComparator(EntityColumnFamily.INFO.getBytes()));
    infoColFamilyList.addFilter(infoColumnFamily);
-   TimelineEntityFilters filters = getFilters();
-   // Events not required.
-   if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.EVENTS) &&
-       !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-       (isSingleEntityRead() || filters.getEventFilters() == null)) {
-     infoColFamilyList.addFilter(
-         new QualifierFilter(CompareOp.NOT_EQUAL,
-             new BinaryPrefixComparator(
-                 EntityColumnPrefix.EVENT.getColumnPrefixBytes(""))));
-   }
-   // info not required.
-   if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.INFO) &&
-       !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-       (isSingleEntityRead() || filters.getInfoFilters() == null)) {
-     infoColFamilyList.addFilter(
-         new QualifierFilter(CompareOp.NOT_EQUAL,
-             new BinaryPrefixComparator(
-                 EntityColumnPrefix.INFO.getColumnPrefixBytes(""))));
-   }
-   // is related to not required.
-   if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.IS_RELATED_TO) &&
-       !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-       (isSingleEntityRead() || filters.getIsRelatedTo() == null)) {
-     infoColFamilyList.addFilter(
-         new QualifierFilter(CompareOp.NOT_EQUAL,
-             new BinaryPrefixComparator(
-                 EntityColumnPrefix.IS_RELATED_TO.getColumnPrefixBytes(""))));
-   }
-   // relates to not required.
-   if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.RELATES_TO) &&
-       !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-       (isSingleEntityRead() || filters.getRelatesTo() == null)) {
-     infoColFamilyList.addFilter(
-         new QualifierFilter(CompareOp.NOT_EQUAL,
-             new BinaryPrefixComparator(
-                 EntityColumnPrefix.RELATES_TO.getColumnPrefixBytes(""))));
-   }
-   list.addFilter(infoColFamilyList);
-   if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS) ||
-       (!isSingleEntityRead() && filters.getConfigFilters() != null)) ||
-       (dataToRetrieve.getConfsToRetrieve() != null &&
-       !dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty())) {
-     FilterList filterCfg =
-         new FilterList(new FamilyFilter(CompareOp.EQUAL,
-             new BinaryComparator(EntityColumnFamily.CONFIGS.getBytes())));
-     if (dataToRetrieve.getConfsToRetrieve() != null &&
-         !dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty()) {
-       filterCfg.addFilter(TimelineFilterUtils.createHBaseFilterList(
-           EntityColumnPrefix.CONFIG, dataToRetrieve.getConfsToRetrieve()));
-     }
-     list.addFilter(filterCfg);
-   }
-   if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS) ||
-       (!isSingleEntityRead() && filters.getMetricFilters() != null)) ||
-       (dataToRetrieve.getMetricsToRetrieve() != null &&
-       !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty())) {
-     FilterList filterMetrics =
-         new FilterList(new FamilyFilter(CompareOp.EQUAL,
-             new BinaryComparator(EntityColumnFamily.METRICS.getBytes())));
-     if (dataToRetrieve.getMetricsToRetrieve() != null &&
-         !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty()) {
-       filterMetrics.addFilter(TimelineFilterUtils.createHBaseFilterList(
-           EntityColumnPrefix.METRIC, dataToRetrieve.getMetricsToRetrieve()));
-     }
-     list.addFilter(filterMetrics);
-   }
-   return list;
+   if (!isSingleEntityRead() && fetchPartialColsFromInfoFamily()) {
+     // We can fetch only some of the columns from info family.
+     infoColFamilyList.addFilter(createFilterListForColsOfInfoFamily());
+   } else {
+     // Exclude column prefixes in info column family which are not required
+     // based on fields to retrieve.
+     excludeFieldsFromInfoColFamily(infoColFamilyList);
+   }
+   listBasedOnFields.addFilter(infoColFamilyList);
+   updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
+   return listBasedOnFields;
  }
/**
* Looks up flow context from AppToFlow table.
*
* @param clusterId Cluster Id.
* @param appId App Id.
* @param hbaseConf HBase configuration.
* @param conn HBase Connection.
* @return flow context information.
* @throws IOException if any problem occurs while fetching flow information.
*/
  protected FlowContext lookupFlowContext(String clusterId, String appId,
      Configuration hbaseConf, Connection conn) throws IOException {
    byte[] rowKey = AppToFlowRowKey.getRowKey(clusterId, appId);
@@ -200,6 +418,9 @@ protected FlowContext lookupFlowContext(String clusterId, String appId,
    }
  }
/**
* Encapsulates flow context information.
*/
  protected static class FlowContext {
    private final String userId;
    private final String flowName;
@@ -222,6 +443,9 @@ protected Long getFlowRunId() {
  @Override
  protected void validateParams() {
+   Preconditions.checkNotNull(getContext(), "context shouldn't be null");
+   Preconditions.checkNotNull(
+       getDataToRetrieve(), "data to retrieve shouldn't be null");
    Preconditions.checkNotNull(getContext().getClusterId(),
        "clusterId shouldn't be null");
    Preconditions.checkNotNull(getContext().getAppId(),
@@ -241,13 +465,19 @@ protected void augmentParams(Configuration hbaseConf, Connection conn)
    // In reality all three should be null or neither should be null
    if (context.getFlowName() == null || context.getFlowRunId() == null ||
        context.getUserId() == null) {
+     // Get flow context information from AppToFlow table.
      FlowContext flowContext = lookupFlowContext(
          context.getClusterId(), context.getAppId(), hbaseConf, conn);
      context.setFlowName(flowContext.flowName);
      context.setFlowRunId(flowContext.flowRunId);
      context.setUserId(flowContext.userId);
    }
+   // Add configs/metrics to fields to retrieve if confsToRetrieve and/or
+   // metricsToRetrieve are specified.
    getDataToRetrieve().addFieldsBasedOnConfsAndMetricsToRetrieve();
+   if (!isSingleEntityRead()) {
+     createFiltersIfNull();
+   }
  }

  @Override
@@ -298,215 +528,84 @@ protected TimelineEntity parseEntity(Result result) throws IOException {
    // fetch created time
    Number createdTime = (Number)EntityColumn.CREATED_TIME.readResult(result);
    entity.setCreatedTime(createdTime.longValue());
-   if (!isSingleEntityRead() &&
-       (entity.getCreatedTime() < filters.getCreatedTimeBegin() ||
-       entity.getCreatedTime() > filters.getCreatedTimeEnd())) {
-     return null;
-   }
    EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
-   // fetch is related to entities
+   // fetch is related to entities and match isRelatedTo filter. If isRelatedTo
+   // filters do not match, entity would be dropped. We have to match filters
+   // locally as relevant HBase filters to filter out rows on the basis of
+   // isRelatedTo are not set in HBase scan.
    boolean checkIsRelatedTo =
-       filters != null && filters.getIsRelatedTo() != null &&
-       filters.getIsRelatedTo().size() > 0;
+       !isSingleEntityRead() && filters.getIsRelatedTo() != null &&
+       filters.getIsRelatedTo().getFilterList().size() > 0;
-   if (fieldsToRetrieve.contains(Field.ALL) ||
-       fieldsToRetrieve.contains(Field.IS_RELATED_TO) || checkIsRelatedTo) {
+   if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.IS_RELATED_TO) ||
+       checkIsRelatedTo) {
-     readRelationship(entity, result, EntityColumnPrefix.IS_RELATED_TO, true);
+     TimelineStorageUtils.readRelationship(
+         entity, result, EntityColumnPrefix.IS_RELATED_TO, true);
-     if (checkIsRelatedTo && !TimelineStorageUtils.matchRelations(
-         entity.getIsRelatedToEntities(), filters.getIsRelatedTo())) {
+     if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity,
+         filters.getIsRelatedTo())) {
        return null;
      }
-     if (!fieldsToRetrieve.contains(Field.ALL) &&
-         !fieldsToRetrieve.contains(Field.IS_RELATED_TO)) {
+     if (!TimelineStorageUtils.hasField(fieldsToRetrieve,
+         Field.IS_RELATED_TO)) {
        entity.getIsRelatedToEntities().clear();
      }
    }
-   // fetch relates to entities
+   // fetch relates to entities and match relatesTo filter. If relatesTo
+   // filters do not match, entity would be dropped. We have to match filters
+   // locally as relevant HBase filters to filter out rows on the basis of
+   // relatesTo are not set in HBase scan.
    boolean checkRelatesTo =
-       filters != null && filters.getRelatesTo() != null &&
-       filters.getRelatesTo().size() > 0;
+       !isSingleEntityRead() && filters.getRelatesTo() != null &&
+       filters.getRelatesTo().getFilterList().size() > 0;
-   if (fieldsToRetrieve.contains(Field.ALL) ||
-       fieldsToRetrieve.contains(Field.RELATES_TO) || checkRelatesTo) {
+   if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO) ||
+       checkRelatesTo) {
-     readRelationship(entity, result, EntityColumnPrefix.RELATES_TO, false);
+     TimelineStorageUtils.readRelationship(
+         entity, result, EntityColumnPrefix.RELATES_TO, false);
-     if (checkRelatesTo && !TimelineStorageUtils.matchRelations(
-         entity.getRelatesToEntities(), filters.getRelatesTo())) {
+     if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity,
+         filters.getRelatesTo())) {
        return null;
      }
-     if (!fieldsToRetrieve.contains(Field.ALL) &&
-         !fieldsToRetrieve.contains(Field.RELATES_TO)) {
+     if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.RELATES_TO)) {
        entity.getRelatesToEntities().clear();
      }
    }
-   // fetch info
-   boolean checkInfo = filters != null && filters.getInfoFilters() != null &&
-       filters.getInfoFilters().size() > 0;
-   if (fieldsToRetrieve.contains(Field.ALL) ||
-       fieldsToRetrieve.contains(Field.INFO) || checkInfo) {
-     readKeyValuePairs(entity, result, EntityColumnPrefix.INFO, false);
-     if (checkInfo &&
-         !TimelineStorageUtils.matchFilters(
-         entity.getInfo(), filters.getInfoFilters())) {
-       return null;
-     }
-     if (!fieldsToRetrieve.contains(Field.ALL) &&
-         !fieldsToRetrieve.contains(Field.INFO)) {
-       entity.getInfo().clear();
-     }
-   }
+   // fetch info if fieldsToRetrieve contains INFO or ALL.
+   if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.INFO)) {
+     TimelineStorageUtils.readKeyValuePairs(
+         entity, result, EntityColumnPrefix.INFO, false);
+   }
-   // fetch configs
-   boolean checkConfigs =
-       filters != null && filters.getConfigFilters() != null &&
-       filters.getConfigFilters().size() > 0;
-   if (fieldsToRetrieve.contains(Field.ALL) ||
-       fieldsToRetrieve.contains(Field.CONFIGS) || checkConfigs) {
-     readKeyValuePairs(entity, result, EntityColumnPrefix.CONFIG, true);
-     if (checkConfigs && !TimelineStorageUtils.matchFilters(
-         entity.getConfigs(), filters.getConfigFilters())) {
-       return null;
-     }
-     if (!fieldsToRetrieve.contains(Field.ALL) &&
-         !fieldsToRetrieve.contains(Field.CONFIGS)) {
-       entity.getConfigs().clear();
-     }
-   }
+   // fetch configs if fieldsToRetrieve contains CONFIGS or ALL.
+   if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.CONFIGS)) {
+     TimelineStorageUtils.readKeyValuePairs(
+         entity, result, EntityColumnPrefix.CONFIG, true);
+   }
-   // fetch events
+   // fetch events and match event filters if they exist. If event filters do
+   // not match, entity would be dropped. We have to match filters locally
+   // as relevant HBase filters to filter out rows on the basis of events
+   // are not set in HBase scan.
    boolean checkEvents =
-       filters != null && filters.getEventFilters() != null &&
-       filters.getEventFilters().size() > 0;
+       !isSingleEntityRead() && filters.getEventFilters() != null &&
+       filters.getEventFilters().getFilterList().size() > 0;
-   if (fieldsToRetrieve.contains(Field.ALL) ||
-       fieldsToRetrieve.contains(Field.EVENTS) || checkEvents) {
+   if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS) ||
+       checkEvents) {
-     readEvents(entity, result, false);
+     TimelineStorageUtils.readEvents(entity, result, EntityColumnPrefix.EVENT);
-     if (checkEvents && !TimelineStorageUtils.matchEventFilters(
-         entity.getEvents(), filters.getEventFilters())) {
+     if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity,
+         filters.getEventFilters())) {
        return null;
      }
-     if (!fieldsToRetrieve.contains(Field.ALL) &&
-         !fieldsToRetrieve.contains(Field.EVENTS)) {
+     if (!TimelineStorageUtils.hasField(fieldsToRetrieve, Field.EVENTS)) {
        entity.getEvents().clear();
      }
    }
-   // fetch metrics
-   boolean checkMetrics =
-       filters != null && filters.getMetricFilters() != null &&
-       filters.getMetricFilters().size() > 0;
-   if (fieldsToRetrieve.contains(Field.ALL) ||
-       fieldsToRetrieve.contains(Field.METRICS) || checkMetrics) {
+   // fetch metrics if fieldsToRetrieve contains METRICS or ALL.
+   if (TimelineStorageUtils.hasField(fieldsToRetrieve, Field.METRICS)) {
      readMetrics(entity, result, EntityColumnPrefix.METRIC);
-     if (checkMetrics && !TimelineStorageUtils.matchMetricFilters(
-         entity.getMetrics(), filters.getMetricFilters())) {
-       return null;
-     }
-     if (!fieldsToRetrieve.contains(Field.ALL) &&
-         !fieldsToRetrieve.contains(Field.METRICS)) {
-       entity.getMetrics().clear();
-     }
    }
    return entity;
  }
/**
* Helper method for reading relationship.
*
* @param <T> Describes the type of column prefix.
* @param entity entity to fill.
* @param result result from HBase.
* @param prefix column prefix.
* @param isRelatedTo if true, means relationship is to be added to
* isRelatedTo, otherwise its added to relatesTo.
* @throws IOException if any problem is encountered while reading result.
*/
protected <T> void readRelationship(
TimelineEntity entity, Result result, ColumnPrefix<T> prefix,
boolean isRelatedTo) throws IOException {
// isRelatedTo and relatesTo are of type Map<String, Set<String>>
Map<String, Object> columns = prefix.readResults(result);
for (Map.Entry<String, Object> column : columns.entrySet()) {
for (String id : Separator.VALUES.splitEncoded(
column.getValue().toString())) {
if (isRelatedTo) {
entity.addIsRelatedToEntity(column.getKey(), id);
} else {
entity.addRelatesToEntity(column.getKey(), id);
}
}
}
}
/**
* Helper method for reading key-value pairs for either info or config.
*
* @param <T> Describes the type of column prefix.
* @param entity entity to fill.
* @param result result from HBase.
* @param prefix column prefix.
* @param isConfig if true, means we are reading configs, otherwise info.
* @throws IOException if any problem is encountered while reading result.
*/
protected <T> void readKeyValuePairs(
TimelineEntity entity, Result result, ColumnPrefix<T> prefix,
boolean isConfig) throws IOException {
// info and configuration are of type Map<String, Object or String>
Map<String, Object> columns = prefix.readResults(result);
if (isConfig) {
for (Map.Entry<String, Object> column : columns.entrySet()) {
entity.addConfig(column.getKey(), column.getValue().toString());
}
} else {
entity.addInfo(columns);
}
}
/**
* Read events from the entity table or the application table. The column name
* is of the form "eventId=timestamp=infoKey" where "infoKey" may be omitted
* if there is no info associated with the event.
*
* @param entity entity to fill.
* @param result HBase Result.
* @param isApplication if true, event read is for application table,
* otherwise its being read for entity table.
* @throws IOException if any problem is encountered while reading result.
*
* See {@link EntityTable} and {@link ApplicationTable} for a more detailed
* schema description.
*/
protected void readEvents(TimelineEntity entity, Result result,
boolean isApplication) throws IOException {
Map<String, TimelineEvent> eventsMap = new HashMap<>();
Map<?, Object> eventsResult = isApplication ?
ApplicationColumnPrefix.EVENT.
readResultsHavingCompoundColumnQualifiers(result) :
EntityColumnPrefix.EVENT.
readResultsHavingCompoundColumnQualifiers(result);
for (Map.Entry<?, Object> eventResult : eventsResult.entrySet()) {
byte[][] karr = (byte[][])eventResult.getKey();
// the column name is of the form "eventId=timestamp=infoKey"
if (karr.length == 3) {
String id = Bytes.toString(karr[0]);
long ts = TimelineStorageUtils.invertLong(Bytes.toLong(karr[1]));
String key = Separator.VALUES.joinEncoded(id, Long.toString(ts));
TimelineEvent event = eventsMap.get(key);
if (event == null) {
event = new TimelineEvent();
event.setId(id);
event.setTimestamp(ts);
eventsMap.put(key, event);
}
// handle empty info
String infoKey = karr[2].length == 0 ? null : Bytes.toString(karr[2]);
if (infoKey != null) {
event.addInfo(infoKey, eventResult.getValue());
}
} else {
LOG.warn("incorrectly formatted column name: it will be discarded");
continue;
}
}
Set<TimelineEvent> eventsSet = new HashSet<>(eventsMap.values());
entity.addEvents(eventsSet);
}
  }
}
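The removed readEvents above (its replacement now lives in TimelineStorageUtils) documents the compound event qualifier layout: "eventId=timestamp=infoKey", with the timestamp inverted so newer events sort first. A toy decoder under two stated assumptions: the real qualifiers are byte-encoded via Separator rather than literal "=" strings, and invertLong is taken to be Long.MAX_VALUE minus the value:

public final class EventQualifierSketch {
  // Undoes the inverted-long encoding (assumed to be MAX_VALUE - ts) that
  // makes newer events sort first under HBase's lexicographic ordering.
  static long invert(long key) {
    return Long.MAX_VALUE - key;
  }

  public static void main(String[] args) {
    long ts = 1425016502000L;
    // Build and then decode an eventId=timestamp=infoKey qualifier. The
    // third part may be empty when the event carries no info.
    String qualifier = "event_1=" + invert(ts) + "=info_key";
    String[] parts = qualifier.split("=");
    System.out.println("eventId=" + parts[0]
        + " ts=" + invert(Long.parseLong(parts[1]))
        + " infoKey=" + (parts.length == 3 ? parts[2] : "<none>"));
  }
}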

@@ -107,11 +107,60 @@ protected TimelineEntityReader(TimelineReaderContext ctxt,
  /**
   * Creates a {@link FilterList} based on fields, confs and metrics to
   * retrieve. This filter list will be set in Scan/Get objects to trim down
-  * results fetched from HBase back-end storage.
+  * results fetched from HBase back-end storage. This is called only for
+  * multiple entity reads.
   *
   * @return a {@link FilterList} object.
+  * @throws IOException if any problem occurs while creating filter list.
   */
- protected abstract FilterList constructFilterListBasedOnFields();
+ protected abstract FilterList constructFilterListBasedOnFields()
+     throws IOException;
/**
* Creates a {@link FilterList} based on info, config and metric filters. This
* filter list will be set in HBase Get to trim down results fetched from
* HBase back-end storage.
*
* @return a {@link FilterList} object.
* @throws IOException if any problem occurs while creating filter list.
*/
protected abstract FilterList constructFilterListBasedOnFilters()
throws IOException;
/**
* Combines filter lists created based on fields and based on filters.
*
   * @return a {@link FilterList} object if it can be constructed. Returns null
   * if a filter list can be created neither on the basis of filters nor on
   * the basis of fields.
* @throws IOException if any problem occurs while creating filter list.
*/
private FilterList createFilterList() throws IOException {
FilterList listBasedOnFilters = constructFilterListBasedOnFilters();
boolean hasListBasedOnFilters = listBasedOnFilters != null &&
!listBasedOnFilters.getFilters().isEmpty();
FilterList listBasedOnFields = constructFilterListBasedOnFields();
boolean hasListBasedOnFields = listBasedOnFields != null &&
!listBasedOnFields.getFilters().isEmpty();
// If filter lists based on both filters and fields can be created,
// combine them in a new filter list and return it.
// If either one of them has been created, return that filter list.
// Return null, if none of the filter lists can be created. This indicates
// that no filter list needs to be added to HBase Scan as filters are not
// specified for the query or only the default view of entity needs to be
// returned.
if (hasListBasedOnFilters && hasListBasedOnFields) {
FilterList list = new FilterList();
list.addFilter(listBasedOnFilters);
list.addFilter(listBasedOnFields);
return list;
} else if (hasListBasedOnFilters) {
return listBasedOnFilters;
} else if (hasListBasedOnFields) {
return listBasedOnFields;
}
return null;
}
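Worth noting about the combination step above: the no-arg FilterList constructor defaults to Operator.MUST_PASS_ALL, so the combined list ANDs the filter-based and field-based lists. A minimal sketch of that behavior:

import org.apache.hadoop.hbase.filter.FilterList;

public final class CombineFilterListsSketch {
  // new FilterList() defaults to Operator.MUST_PASS_ALL (logical AND), so a
  // row must pass both the filters-based and the fields-based sub-lists
  // before the scan returns it.
  public static FilterList combine(FilterList basedOnFilters,
      FilterList basedOnFields) {
    FilterList combined = new FilterList();  // MUST_PASS_ALL by default
    combined.addFilter(basedOnFilters);
    combined.addFilter(basedOnFields);
    return combined;
  }
}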
  protected TimelineReaderContext getContext() {
    return context;
@@ -125,6 +174,16 @@ protected TimelineEntityFilters getFilters() {
    return filters;
  }
/**
* Create a {@link TimelineEntityFilters} object with default values for
* filters.
*/
protected void createFiltersIfNull() {
if (filters == null) {
filters = new TimelineEntityFilters();
}
}
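createFiltersIfNull gives multiple-entity reads a guaranteed non-null filters object, which is what lets the parseEntity rewrites above drop their filters != null guards. A stand-alone sketch of the same null-object move, with a stand-in class whose defaults (0 and Long.MAX_VALUE) are inferred from the range checks elsewhere in this patch:

public final class DefaultFiltersSketch {
  // Stand-in for TimelineEntityFilters; the real class lives in
  // o.a.h.yarn.server.timelineservice.reader and carries filter lists too.
  static class Filters {
    long createdTimeBegin = 0L;
    long createdTimeEnd = Long.MAX_VALUE;
  }

  private Filters filters;

  // Mirrors createFiltersIfNull(): installing a default object once means
  // later code can drop all "filters != null" checks.
  void createFiltersIfNull() {
    if (filters == null) {
      filters = new Filters();
    }
  }

  public static void main(String[] args) {
    DefaultFiltersSketch reader = new DefaultFiltersSketch();
    reader.createFiltersIfNull();
    System.out.println(reader.filters.createdTimeEnd);  // 9223372036854775807
  }
}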
  /**
   * Reads and deserializes a single timeline entity from the HBase storage.
   *
@@ -140,6 +199,9 @@ public TimelineEntity readEntity(Configuration hbaseConf, Connection conn)
    augmentParams(hbaseConf, conn);
    FilterList filterList = constructFilterListBasedOnFields();
+   if (LOG.isDebugEnabled() && filterList != null) {
+     LOG.debug("FilterList created for get is - " + filterList);
+   }
    Result result = getResult(hbaseConf, conn, filterList);
    if (result == null || result.isEmpty()) {
      // Could not find a matching row.
@@ -166,7 +228,10 @@ public Set<TimelineEntity> readEntities(Configuration hbaseConf,
    augmentParams(hbaseConf, conn);
    NavigableSet<TimelineEntity> entities = new TreeSet<>();
-   FilterList filterList = constructFilterListBasedOnFields();
+   FilterList filterList = createFilterList();
+   if (LOG.isDebugEnabled() && filterList != null) {
+     LOG.debug("FilterList created for scan is - " + filterList);
+   }
    ResultScanner results = getResults(hbaseConf, conn, filterList);
    try {
      for (Result result : results) {

@@ -221,7 +221,7 @@ public void testGetEntityCustomFields() throws Exception {
      assertTrue("UID should be present",
          entity.getInfo().containsKey(TimelineReaderManager.UID_KEY));
      // Includes UID.
-     assertEquals(2, entity.getInfo().size());
+     assertEquals(3, entity.getInfo().size());
      // No events will be returned as events are not part of fields.
      assertEquals(0, entity.getEvents().size());
    } finally {
@@ -247,7 +247,7 @@ public void testGetEntityAllFields() throws Exception {
      assertTrue("UID should be present",
          entity.getInfo().containsKey(TimelineReaderManager.UID_KEY));
      // Includes UID.
-     assertEquals(2, entity.getInfo().size());
+     assertEquals(3, entity.getInfo().size());
      assertEquals(2, entity.getEvents().size());
    } finally {
      client.destroy();
@@ -443,10 +443,8 @@ public void testGetEntitiesByConfigFilters() throws Exception {
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
      assertNotNull(entities);
-     assertEquals(2, entities.size());
-     assertTrue("Entities with id_1 and id_3 should have been present" +
-         " in response.",
-         entities.contains(newEntity("app", "id_1")) &&
+     assertEquals(1, entities.size());
+     assertTrue("Entity with id_3 should have been present in response.",
          entities.contains(newEntity("app", "id_3")));
    } finally {
      client.destroy();


@@ -23,6 +23,7 @@
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
@@ -40,6 +41,13 @@
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.junit.AfterClass;
@@ -112,6 +120,7 @@ private static void loadEntityData() throws Exception {
    entity11.setCreatedTime(1425016502000L);
    Map<String, Object> info1 = new HashMap<String, Object>();
    info1.put("info1", "val1");
info1.put("info2", "val5");
    entity11.addInfo(info1);
    TimelineEvent event = new TimelineEvent();
    event.setId("event_1");
@@ -121,7 +130,7 @@ private static void loadEntityData() throws Exception {
    TimelineMetric metric1 = new TimelineMetric();
    metric1.setId("metric1");
    metric1.setType(TimelineMetric.Type.SINGLE_VALUE);
-   metric1.addValue(1425016502006L, 113.2F);
+   metric1.addValue(1425016502006L, 113);
    metrics.add(metric1);
    TimelineMetric metric2 = new TimelineMetric();
    metric2.setId("metric2");
@@ -130,7 +139,7 @@ private static void loadEntityData() throws Exception {
    metrics.add(metric2);
    entity11.setMetrics(metrics);
    Map<String,String> configs = new HashMap<String, String>();
-   configs.put("config_1", "123");
+   configs.put("config_1", "127");
    entity11.setConfigs(configs);
    entity11.addRelatesToEntity("flow", "flow1");
    entity11.addIsRelatedToEntity("type1", "tid1_1");
@@ -171,7 +180,7 @@ private static void loadEntityData() throws Exception {
    info1.put("info2", 4);
    entity2.addInfo(info2);
    Map<String,String> configs2 = new HashMap<String, String>();
-   configs2.put("config_1", "123");
+   configs2.put("config_1", "129");
    configs2.put("config_3", "def");
    entity2.setConfigs(configs2);
    TimelineEvent event2 = new TimelineEvent();
@@ -182,7 +191,7 @@ private static void loadEntityData() throws Exception {
    TimelineMetric metric21 = new TimelineMetric();
    metric21.setId("metric1");
    metric21.setType(TimelineMetric.Type.SINGLE_VALUE);
-   metric21.addValue(1425016501006L, 123.2F);
+   metric21.addValue(1425016501006L, 300);
    metrics2.add(metric21);
    TimelineMetric metric22 = new TimelineMetric();
    metric22.setId("metric2");
@@ -205,6 +214,7 @@ private static void loadEntityData() throws Exception {
    entity3.setCreatedTime(1425016501050L);
    Map<String, Object> info3 = new HashMap<String, Object>();
    info3.put("info2", 3.5);
info3.put("info4", 20);
    entity3.addInfo(info3);
    Map<String,String> configs3 = new HashMap<String, String>();
    configs3.put("config_1", "123");
@@ -222,7 +232,7 @@ private static void loadEntityData() throws Exception {
    TimelineMetric metric31 = new TimelineMetric();
    metric31.setId("metric1");
    metric31.setType(TimelineMetric.Type.SINGLE_VALUE);
-   metric31.addValue(1425016501006L, 124.8F);
+   metric31.addValue(1425016501006L, 124);
    metrics3.add(metric31);
    TimelineMetric metric32 = new TimelineMetric();
    metric32.setId("metric2");
@@ -317,7 +327,7 @@ public void testGetEntityCustomFields() throws Exception {
    Assert.assertEquals(1425016502000L, result.getCreatedTime());
    Assert.assertEquals(3, result.getConfigs().size());
    Assert.assertEquals(3, result.getMetrics().size());
-   Assert.assertEquals(1, result.getInfo().size());
+   Assert.assertEquals(2, result.getInfo().size());
    // No events will be returned
    Assert.assertEquals(0, result.getEvents().size());
  }
@@ -344,8 +354,8 @@ public void testGetAllEntities() throws Exception {
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null), new TimelineEntityFilters(),
-       new TimelineDataToRetrieve());
+       new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
-   // All 3 entities will be returned
+   // All 4 entities will be returned
    Assert.assertEquals(4, result.size());
  }
@@ -425,12 +435,13 @@ public void testGetEntitiesByTimeWindows() throws Exception {
  @Test
  public void testGetFilteredEntities() throws Exception {
    // Get entities based on info filters.
-   Map<String, Object> infoFilters = new HashMap<String, Object>();
-   infoFilters.put("info2", 3.5);
+   TimelineFilterList infoFilterList = new TimelineFilterList();
+   infoFilterList.addFilter(
+       new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null),
-       new TimelineEntityFilters(null, null, null, null, null, infoFilters,
+       new TimelineEntityFilters(null, null, null, null, null, infoFilterList,
            null, null, null),
        new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
@@ -442,26 +453,30 @@ public void testGetFilteredEntities() throws Exception {
    }
    // Get entities based on config filters.
-   Map<String, String> configFilters = new HashMap<String, String>();
-   configFilters.put("config_1", "123");
-   configFilters.put("config_3", "abc");
+   TimelineFilterList confFilterList = new TimelineFilterList();
+   confFilterList.addFilter(
+       new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "123"));
+   confFilterList.addFilter(
+       new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null),
        new TimelineEntityFilters(null, null, null, null, null, null,
-           configFilters, null, null),
+           confFilterList, null, null),
        new TimelineDataToRetrieve());
-   Assert.assertEquals(2, result.size());
+   Assert.assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
-     if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
+     if (!entity.getId().equals("id_3")) {
        Assert.fail("Incorrect filtering based on config filters");
      }
    }
    // Get entities based on event filters.
-   Set<String> eventFilters = new HashSet<String>();
-   eventFilters.add("event_2");
-   eventFilters.add("event_4");
+   TimelineFilterList eventFilters = new TimelineFilterList();
+   eventFilters.addFilter(
+       new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_2"));
+   eventFilters.addFilter(
+       new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null),
@@ -476,13 +491,14 @@ public void testGetFilteredEntities() throws Exception {
    }
    // Get entities based on metric filters.
-   Set<String> metricFilters = new HashSet<String>();
-   metricFilters.add("metric3");
+   TimelineFilterList metricFilterList = new TimelineFilterList();
+   metricFilterList.addFilter(new TimelineCompareFilter(
+       TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null),
        new TimelineEntityFilters(null, null, null, null, null, null, null,
-           metricFilters, null),
+           metricFilterList, null),
        new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    // Two entities with IDs id_1 and id_2 should be returned.
@@ -491,15 +507,266 @@ public void testGetFilteredEntities() throws Exception {
        Assert.fail("Incorrect filtering based on metric filters");
      }
    }
-  }
// Get entities based on complex config filters.
TimelineFilterList list1 = new TimelineFilterList();
list1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "129"));
list1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "def"));
TimelineFilterList list2 = new TimelineFilterList();
list2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
list2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
TimelineFilterList confFilterList1 =
new TimelineFilterList(Operator.OR, list1, list2);
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList1, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters");
}
}
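The nested lists above are easiest to read as a boolean expression; a restatement of confFilterList1 (an inner TimelineFilterList defaults to AND, while the outer list was built with Operator.OR):

// confFilterList1 ==
//     (config_1 == "129" AND config_3 == "def")    // list1
//  OR (config_2 == "23"  AND config_3 == "abc")    // list2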
TimelineFilterList list3 = new TimelineFilterList();
list3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_1", "123"));
list3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
TimelineFilterList list4 = new TimelineFilterList();
list4.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
TimelineFilterList confFilterList2 =
new TimelineFilterList(Operator.OR, list3, list4);
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList2, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters");
}
}
TimelineFilterList confFilterList3 = new TimelineFilterList();
confFilterList3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_1", "127"));
confFilterList3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList3, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size());
      for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters");
}
}
TimelineFilterList confFilterList4 = new TimelineFilterList();
confFilterList4.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
confFilterList4.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_3", "def"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList4, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size());
TimelineFilterList confFilterList5 = new TimelineFilterList(Operator.OR);
confFilterList5.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
confFilterList5.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_3", "def"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList5, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on config filters");
}
}
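The two dummy-key cases above pin down the missing-key semantics for config filters; a summary, inferred from the asserted counts (keyMustExistFlag defaults to true in TimelineKeyValueFilter):

// confFilterList4 (AND): config_dummy == "dummy" can never match because no
//   entity carries that key, so the conjunction fails everywhere -> 0 results.
// confFilterList5 (OR): the failing sub-filter is tolerated as long as
//   config_3 == "def" holds, which only id_2 satisfies -> 1 result.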
// Get entities based on complex metric filters.
TimelineFilterList list6 = new TimelineFilterList();
list6.addFilter(new TimelineCompareFilter(
TimelineCompareOp.GREATER_THAN, "metric1", 200));
list6.addFilter(new TimelineCompareFilter(
TimelineCompareOp.EQUAL, "metric3", 23));
TimelineFilterList list7 = new TimelineFilterList();
list7.addFilter(new TimelineCompareFilter(
TimelineCompareOp.GREATER_OR_EQUAL, "metric2", 74));
TimelineFilterList metricFilterList1 =
new TimelineFilterList(Operator.OR, list6, list7);
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList1, null),
new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size());
// Two entities with IDs' id_2 and id_3 should be returned.
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2") && !entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on metric filters");
}
}
TimelineFilterList metricFilterList2 = new TimelineFilterList();
metricFilterList2.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, "metric2", 70));
metricFilterList2.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList2, null),
new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1")) {
Assert.fail("Incorrect filtering based on metric filters");
}
}
TimelineFilterList metricFilterList3 = new TimelineFilterList();
metricFilterList3.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
metricFilterList3.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList3, null),
new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size());
TimelineFilterList metricFilterList4 = new TimelineFilterList(Operator.OR);
metricFilterList4.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
metricFilterList4.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList4, null),
new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on metric filters");
}
}
TimelineFilterList metricFilterList5 =
new TimelineFilterList(new TimelineCompareFilter(
TimelineCompareOp.NOT_EQUAL, "metric2", 74));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList5, null),
new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
Assert.fail("Incorrect filtering based on metric filters");
}
}
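Metric filters appear to behave the same way for missing keys, as metricFilterList3 and metricFilterList4 show; a summary of those two cases:

// metricFilterList3 (AND): dummy_metric < 30 AND metric3 <= 23 -> the first
//   comparison has no key to compare against, so the AND yields 0 results.
// metricFilterList4 (OR):  dummy_metric < 30 OR metric3 <= 23 -> id_1 and
//   id_2 pass via metric3 <= 23, giving 2 results.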
TimelineFilterList infoFilterList1 = new TimelineFilterList();
infoFilterList1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
infoFilterList1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "info4", 20));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList1,
null, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size());
TimelineFilterList infoFilterList2 = new TimelineFilterList(Operator.OR);
infoFilterList2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
infoFilterList2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info1", "val1"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList2,
null, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
Assert.fail("Incorrect filtering based on info filters");
}
}
TimelineFilterList infoFilterList3 = new TimelineFilterList();
infoFilterList3.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
infoFilterList3.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList3,
null, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(0, result.size());
TimelineFilterList infoFilterList4 = new TimelineFilterList(Operator.OR);
infoFilterList4.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
infoFilterList4.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList4,
null, null, null),
new TimelineDataToRetrieve());
Assert.assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1")) {
Assert.fail("Incorrect filtering based on info filters");
}
}
}
  @Test
  public void testGetEntitiesByRelations() throws Exception {
    // Get entities based on relatesTo.
-   Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
-   Set<String> relatesToIds = new HashSet<String>();
-   relatesToIds.add("flow1");
-   relatesTo.put("flow", relatesToIds);
+   TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR);
+   Set<Object> relatesToIds =
+       new HashSet<Object>(Arrays.asList((Object)"flow1"));
+   relatesTo.addFilter(new TimelineKeyValuesFilter(
+       TimelineCompareOp.EQUAL, "flow", relatesToIds));
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null),
@@ -515,10 +782,11 @@ public void testGetEntitiesByRelations() throws Exception {
    }
    // Get entities based on isRelatedTo.
-   Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
-   Set<String> isRelatedToIds = new HashSet<String>();
-   isRelatedToIds.add("tid1_2");
-   isRelatedTo.put("type1", isRelatedToIds);
+   TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR);
+   Set<Object> isRelatedToIds =
+       new HashSet<Object>(Arrays.asList((Object)"tid1_2"));
+   isRelatedTo.addFilter(new TimelineKeyValuesFilter(
+       TimelineCompareOp.EQUAL, "type1", isRelatedToIds));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null),


@@ -154,6 +154,14 @@ static TimelineEntity getEntityMetricsApp1(long insertTs) {
    metrics.add(m2);
    entity.addMetrics(metrics);
TimelineEvent event = new TimelineEvent();
event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
long endTs = 1439379885000L;
event.setTimestamp(endTs);
String expKey = "foo_event_greater";
String expVal = "test_app_greater";
event.addInfo(expKey, expVal);
entity.addEvent(event);
    return entity;
  }
@@ -178,6 +186,14 @@ static TimelineEntity getEntityMetricsApp2(long insertTs) {
    m1.setValues(metricValues);
    metrics.add(m1);
    entity.addMetrics(metrics);
TimelineEvent event = new TimelineEvent();
event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
long endTs = 1439379885000L;
event.setTimestamp(endTs);
String expKey = "foo_event_greater";
String expVal = "test_app_greater";
event.addInfo(expKey, expVal);
entity.addEvent(event);
    return entity;
  }


@@ -47,8 +47,10 @@
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
@@ -307,7 +309,7 @@ private void checkFlowRunTable(String cluster, String user, String flow,
    assertEquals(141L, Bytes.toLong(values.get(q)));
    // check metric2
-   assertEquals(2, values.size());
+   assertEquals(3, values.size());
    q = ColumnHelper.getColumnQualifier(
        FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric2);
    assertTrue(values.containsKey(q));
@@ -318,11 +320,10 @@ private void checkFlowRunTable(String cluster, String user, String flow,
  @Test
  public void testWriteFlowRunMetricsPrefix() throws Exception {
-   String cluster = "testWriteFlowRunMetricsOneFlow_cluster1";
-   String user = "testWriteFlowRunMetricsOneFlow_user1";
-   String flow = "testing_flowRun_metrics_flow_name";
+   String cluster = "testWriteFlowRunMetricsPrefix_cluster1";
+   String user = "testWriteFlowRunMetricsPrefix_user1";
+   String flow = "testWriteFlowRunMetricsPrefix_flow_name";
    String flowVersion = "CF7022C10F1354";
-   long runid = 1002345678919L;
    TimelineEntities te = new TimelineEntities();
    TimelineEntity entityApp1 = TestFlowDataGenerator
@@ -335,33 +336,30 @@ public void testWriteFlowRunMetricsPrefix() throws Exception {
      hbi = new HBaseTimelineWriterImpl(c1);
      hbi.init(c1);
      String appName = "application_11111111111111_1111";
-     hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
+     hbi.write(cluster, user, flow, flowVersion, 1002345678919L, appName, te);
      // write another application with same metric to this flow
      te = new TimelineEntities();
      TimelineEntity entityApp2 = TestFlowDataGenerator
          .getEntityMetricsApp2(System.currentTimeMillis());
      te.addEntity(entityApp2);
      appName = "application_11111111111111_2222";
-     hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
+     hbi.write(cluster, user, flow, flowVersion, 1002345678918L, appName, te);
      hbi.flush();
    } finally {
      hbi.close();
    }
-   // check flow run
-   checkFlowRunTable(cluster, user, flow, runid, c1);
    // use the timeline reader to verify data
    HBaseTimelineReaderImpl hbr = null;
    try {
      hbr = new HBaseTimelineReaderImpl();
      hbr.init(c1);
      hbr.start();
-     TimelineFilterList metricsToRetrieve =
-         new TimelineFilterList(new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
+     TimelineFilterList metricsToRetrieve = new TimelineFilterList(
+         Operator.OR, new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
              metric1.substring(0, metric1.indexOf("_") + 1)));
      TimelineEntity entity = hbr.getEntity(
-         new TimelineReaderContext(cluster, user, flow, runid, null,
+         new TimelineReaderContext(cluster, user, flow, 1002345678919L, null,
              TimelineEntityType.YARN_FLOW_RUN.toString(), null),
          new TimelineDataToRetrieve(null, metricsToRetrieve, null));
      assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
@@ -377,7 +375,7 @@ public void testWriteFlowRunMetricsPrefix() throws Exception {
      }
      switch (id) {
        case metric1:
-         assertEquals(141L, value);
+         assertEquals(40L, value);
          break;
        default:
          fail("unrecognized metric: " + id);
@@ -385,31 +383,16 @@ public void testWriteFlowRunMetricsPrefix() throws Exception {
      }
      Set<TimelineEntity> entities = hbr.getEntities(
-         new TimelineReaderContext(cluster, user, flow, runid, null,
+         new TimelineReaderContext(cluster, user, flow, null, null,
              TimelineEntityType.YARN_FLOW_RUN.toString(), null),
          new TimelineEntityFilters(),
          new TimelineDataToRetrieve(null, metricsToRetrieve, null));
-     assertEquals(1, entities.size());
+     assertEquals(2, entities.size());
int metricCnt = 0;
      for (TimelineEntity timelineEntity : entities) {
-       Set<TimelineMetric> timelineMetrics = timelineEntity.getMetrics();
-       assertEquals(1, timelineMetrics.size());
-       for (TimelineMetric metric : timelineMetrics) {
-         String id = metric.getId();
-         Map<Long, Number> values = metric.getValues();
-         assertEquals(1, values.size());
-         Number value = null;
-         for (Number n : values.values()) {
-           value = n;
-         }
-         switch (id) {
-           case metric1:
-             assertEquals(141L, value);
-             break;
-           default:
-             fail("unrecognized metric: " + id);
-         }
-       }
+       metricCnt += timelineEntity.getMetrics().size();
      }
assertEquals(2, metricCnt);
    } finally {
      hbr.close();
    }
@@ -417,9 +400,9 @@ public void testWriteFlowRunMetricsPrefix() throws Exception {
  @Test
  public void testWriteFlowRunsMetricFields() throws Exception {
-   String cluster = "testWriteFlowRunMetricsOneFlow_cluster1";
-   String user = "testWriteFlowRunMetricsOneFlow_user1";
-   String flow = "testing_flowRun_metrics_flow_name";
+   String cluster = "testWriteFlowRunsMetricFields_cluster1";
+   String user = "testWriteFlowRunsMetricFields_user1";
+   String flow = "testWriteFlowRunsMetricFields_flow_name";
    String flowVersion = "CF7022C10F1354";
    long runid = 1002345678919L;
@@ -592,6 +575,214 @@ private void checkMinMaxFlush(Configuration c1, long minTS, long startTs,
    }
  }
@Test
public void testFilterFlowRunsByCreatedTime() throws Exception {
String cluster = "cluster2";
String user = "user2";
String flow = "flow_name2";
TimelineEntities te = new TimelineEntities();
TimelineEntity entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(
System.currentTimeMillis());
entityApp1.setCreatedTime(1425016501000L);
te.addEntity(entityApp1);
HBaseTimelineWriterImpl hbi = null;
Configuration c1 = util.getConfiguration();
try {
hbi = new HBaseTimelineWriterImpl(c1);
hbi.init(c1);
hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678919L,
"application_11111111111111_1111", te);
// write another application with same metric to this flow
te = new TimelineEntities();
TimelineEntity entityApp2 = TestFlowDataGenerator.getEntityMetricsApp2(
System.currentTimeMillis());
entityApp2.setCreatedTime(1425016502000L);
te.addEntity(entityApp2);
hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678918L,
"application_11111111111111_2222", te);
hbi.flush();
} finally {
hbi.close();
}
// use the timeline reader to verify data
HBaseTimelineReaderImpl hbr = null;
try {
hbr = new HBaseTimelineReaderImpl();
hbr.init(c1);
hbr.start();
Set<TimelineEntity> entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow,
null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, 1425016501000L, 1425016502001L, null,
null, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(2, entities.size());
for (TimelineEntity entity : entities) {
if (!entity.getId().equals("user2@flow_name2/1002345678918") &&
!entity.getId().equals("user2@flow_name2/1002345678919")) {
fail("Entities with flow runs 1002345678918 and 1002345678919" +
"should be present.");
}
}
entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, 1425016501050L, null, null, null,
null, null, null, null), new TimelineDataToRetrieve());
assertEquals(1, entities.size());
for (TimelineEntity entity : entities) {
if (!entity.getId().equals("user2@flow_name2/1002345678918")) {
fail("Entity with flow run 1002345678918 should be present.");
}
}
entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, 1425016501050L, null, null,
null, null, null, null), new TimelineDataToRetrieve());
assertEquals(1, entities.size());
for (TimelineEntity entity : entities) {
if (!entity.getId().equals("user2@flow_name2/1002345678919")) {
fail("Entity with flow run 1002345678919 should be present.");
}
}
} finally {
hbr.close();
}
}
@Test
public void testMetricFilters() throws Exception {
String cluster = "cluster1";
String user = "user1";
String flow = "flow_name1";
TimelineEntities te = new TimelineEntities();
TimelineEntity entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(
System.currentTimeMillis());
te.addEntity(entityApp1);
HBaseTimelineWriterImpl hbi = null;
Configuration c1 = util.getConfiguration();
try {
hbi = new HBaseTimelineWriterImpl(c1);
hbi.init(c1);
hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678919L,
"application_11111111111111_1111", te);
// write another application with same metric to this flow
te = new TimelineEntities();
TimelineEntity entityApp2 = TestFlowDataGenerator.getEntityMetricsApp2(
System.currentTimeMillis());
te.addEntity(entityApp2);
hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678918L,
"application_11111111111111_2222", te);
hbi.flush();
} finally {
hbi.close();
}
// use the timeline reader to verify data
HBaseTimelineReaderImpl hbr = null;
try {
hbr = new HBaseTimelineReaderImpl();
hbr.init(c1);
hbr.start();
TimelineFilterList list1 = new TimelineFilterList();
list1.addFilter(new TimelineCompareFilter(
TimelineCompareOp.GREATER_OR_EQUAL, metric1, 101));
TimelineFilterList list2 = new TimelineFilterList();
list2.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, metric1, 43));
list2.addFilter(new TimelineCompareFilter(
TimelineCompareOp.EQUAL, metric2, 57));
TimelineFilterList metricFilterList =
new TimelineFilterList(Operator.OR, list1, list2);
Set<TimelineEntity> entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, null,
null, TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList, null),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
assertEquals(2, entities.size());
int metricCnt = 0;
for (TimelineEntity entity : entities) {
metricCnt += entity.getMetrics().size();
}
assertEquals(3, metricCnt);
TimelineFilterList metricFilterList1 = new TimelineFilterList(
new TimelineCompareFilter(
TimelineCompareOp.LESS_OR_EQUAL, metric1, 127),
new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, metric2, 30));
entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList1, null),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
assertEquals(1, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
metricCnt += entity.getMetrics().size();
}
assertEquals(2, metricCnt);
TimelineFilterList metricFilterList2 = new TimelineFilterList(
new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, metric1, 32),
new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, metric2, 57));
entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList2, null),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList3 = new TimelineFilterList(
new TimelineCompareFilter(TimelineCompareOp.EQUAL, "s_metric", 32));
entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList3, null),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
assertEquals(0, entities.size());
TimelineFilterList list3 = new TimelineFilterList();
list3.addFilter(new TimelineCompareFilter(
TimelineCompareOp.GREATER_OR_EQUAL, metric1, 101));
TimelineFilterList list4 = new TimelineFilterList();
list4.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, metric1, 43));
list4.addFilter(new TimelineCompareFilter(
TimelineCompareOp.EQUAL, metric2, 57));
TimelineFilterList metricFilterList4 =
new TimelineFilterList(Operator.OR, list3, list4);
TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
metric2.substring(0, metric2.indexOf("_") + 1)));
entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList4, null),
new TimelineDataToRetrieve(null, metricsToRetrieve,
EnumSet.of(Field.ALL)));
assertEquals(2, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
metricCnt += entity.getMetrics().size();
}
assertEquals(1, metricCnt);
} finally {
hbr.close();
}
}
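One design point worth calling out from the last case in testMetricFilters: metric filters select entities, while metricsToRetrieve only projects which metric columns come back. A summary of that case:

// metricFilterList4 -> decides which flow runs match (2 entities returned).
// metricsToRetrieve -> prunes the metrics read back: only ids sharing
//   metric2's prefix survive, so metricCnt is 1 across both entities.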
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    util.shutdownMiniCluster();


@@ -148,7 +148,7 @@ public void testWriteFlowRunCompaction() throws Exception {
    }
    // check flow run for one flow many apps
-   checkFlowRunTable(cluster, user, flow, runid, c1, 3);
+   checkFlowRunTable(cluster, user, flow, runid, c1, 4);
  }


@@ -0,0 +1,62 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
* Filter class which represents filter to be applied based on existence of a
* value.
*/
@Private
@Unstable
public class TimelineExistsFilter extends TimelineFilter {
private final TimelineCompareOp compareOp;
private final String value;
public TimelineExistsFilter(TimelineCompareOp op, String value) {
this.value = value;
if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) {
throw new IllegalArgumentException("CompareOp for exists filter should " +
"be EQUAL or NOT_EQUAL");
}
this.compareOp = op;
}
@Override
public TimelineFilterType getFilterType() {
return TimelineFilterType.EXISTS;
}
public String getValue() {
return value;
}
public TimelineCompareOp getCompareOp() {
return compareOp;
}
@Override
public String toString() {
return String.format("%s (%s %s)",
this.getClass().getSimpleName(), this.compareOp.name(), this.value);
}
}
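A minimal usage sketch for this class (the event ids are hypothetical; TimelineFilterList's varargs constructor and default AND operator are used as in the tests above, and the example is assumed to live in the same .reader.filter package):

package org.apache.hadoop.yarn.server.timelineservice.reader.filter;

/** Illustrative usage only; not part of this patch. */
public class TimelineExistsFilterExample {
  /** Matched entities must contain event_1 and must not contain event_2. */
  public static TimelineFilterList mustAndMustNotExist() {
    return new TimelineFilterList(
        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_1"),
        new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "event_2"));
  }
}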


@@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
* Filter class which represents filter to be applied based on key-value pair
* being equal or not to the values in back-end store.
*/
@Private
@Unstable
public class TimelineKeyValueFilter extends TimelineCompareFilter {
public TimelineKeyValueFilter(TimelineCompareOp op, String key, Object val,
boolean keyMustExistFlag) {
super(op, key, val, keyMustExistFlag);
if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) {
throw new IllegalArgumentException("TimelineCompareOp for equality"
+ " filter should be EQUAL or NOT_EQUAL");
}
}
public TimelineKeyValueFilter(TimelineCompareOp op, String key, Object val) {
this(op, key, val, true);
}
@Override
public TimelineFilterType getFilterType() {
return TimelineFilterType.KEY_VALUE;
}
}
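A minimal usage sketch (config names follow the tests in this commit; the example class itself is hypothetical and placed in the same package to avoid imports):

package org.apache.hadoop.yarn.server.timelineservice.reader.filter;

/** Illustrative usage only; not part of this patch. */
public class TimelineKeyValueFilterExample {
  /**
   * Either config value is acceptable (OR semantics); the three-argument
   * constructor defaults keyMustExistFlag to true.
   */
  public static TimelineFilterList eitherConfigValue() {
    return new TimelineFilterList(TimelineFilterList.Operator.OR,
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "123"),
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "127"));
  }
}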


@@ -0,0 +1,71 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
* Filter class which represents filter to be applied based on multiple values
* for a key and these values being equal or not equal to values in back-end
* store.
*/
@Private
@Unstable
public class TimelineKeyValuesFilter extends TimelineFilter {
private final TimelineCompareOp compareOp;
private final String key;
private final Set<Object> values;
public TimelineKeyValuesFilter(TimelineCompareOp op, String key,
Set<Object> values) {
if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) {
throw new IllegalArgumentException("TimelineCompareOp for multi value "
+ "equality filter should be EQUAL or NOT_EQUAL");
}
this.compareOp = op;
this.key = key;
this.values = values;
}
@Override
public TimelineFilterType getFilterType() {
return TimelineFilterType.KEY_VALUES;
}
public String getKey() {
return key;
}
public Set<Object> getValues() {
return values;
}
public TimelineCompareOp getCompareOp() {
return compareOp;
}
@Override
public String toString() {
return String.format("%s (%s, %s:%s)",
this.getClass().getSimpleName(), this.compareOp.name(),
this.key, (values == null) ? "" : values.toString());
}
}
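A minimal usage sketch, mirroring how the relatesTo/isRelatedTo tests in this commit build their filters (flow ids are hypothetical; the example class is not part of this patch):

package org.apache.hadoop.yarn.server.timelineservice.reader.filter;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** Illustrative usage only; not part of this patch. */
public class TimelineKeyValuesFilterExample {
  /** Match entities that relate to either of two flows. */
  public static TimelineFilterList relatesToEitherFlow() {
    Set<Object> flowIds =
        new HashSet<Object>(Arrays.asList((Object) "flow1", "flow2"));
    return new TimelineFilterList(new TimelineKeyValuesFilter(
        TimelineCompareOp.EQUAL, "flow", flowIds));
  }
}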


@@ -0,0 +1,71 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.storage.common;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter.TimelineFilterType;
/**
* Used to define which filter to match.
*/
enum TimelineEntityFiltersType {
CONFIG {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUE;
}
},
INFO {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUE;
}
},
METRIC {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.COMPARE;
}
},
EVENT {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.EXISTS;
}
},
IS_RELATED_TO {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUES;
}
},
RELATES_TO {
boolean isValidFilter(TimelineFilterType filterType) {
return filterType == TimelineFilterType.LIST ||
filterType == TimelineFilterType.KEY_VALUES;
}
};
/**
* Checks whether filter type is valid for the filter being matched.
*
* @param filterType filter type.
* @return true, if its a valid filter, false otherwise.
*/
abstract boolean isValidFilter(TimelineFilterType filterType);
}
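A sketch of how storage code might use this enum to validate a parsed filter list before translating it to HBase filters. It assumes TimelineFilterList exposes its children via getFilterList(); the validation class itself is hypothetical and sits in the same package, since both the enum and isValidFilter are package-private:

package org.apache.hadoop.yarn.server.timelineservice.storage.common;

import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;

final class FilterValidationSketch {
  /** Recursively verify every node against the allowed filter types. */
  static boolean isValid(TimelineFilter filter,
      TimelineEntityFiltersType filtersType) {
    if (!filtersType.isValidFilter(filter.getFilterType())) {
      return false;
    }
    if (filter instanceof TimelineFilterList) {
      for (TimelineFilter child :
          ((TimelineFilterList) filter).getFilterList()) {
        if (!isValid(child, filtersType)) {
          return false;
        }
      }
    }
    return true;
  }
}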