YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2b2df86c Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2b2df86c Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2b2df86c Branch: refs/heads/YARN-2928 Commit: 2b2df86c775f008c258fe128d3686f4a4d3b1a52 Parents: 408f001 Author: Sangjin Lee <sj...@apache.org> Authored: Mon Apr 11 21:07:32 2016 -0700 Committer: Li Lu <gtcarre...@apache.org> Committed: Wed May 4 16:35:33 2016 -0700 ---------------------------------------------------------------------- .../reader/TimelineEntityFilters.java | 170 +- .../reader/TimelineReaderWebServicesUtils.java | 88 +- .../reader/filter/TimelineCompareFilter.java | 35 +- .../reader/filter/TimelineExistsFilter.java | 62 + .../reader/filter/TimelineFilter.java | 16 +- .../reader/filter/TimelineFilterList.java | 14 + .../reader/filter/TimelineFilterUtils.java | 206 +- .../reader/filter/TimelineKeyValueFilter.java | 48 + .../reader/filter/TimelineKeyValuesFilter.java | 71 + .../reader/filter/TimelinePrefixFilter.java | 6 + .../storage/FileSystemTimelineReaderImpl.java | 36 +- .../storage/HBaseTimelineWriterImpl.java | 31 +- .../storage/application/ApplicationColumn.java | 28 +- .../application/ApplicationColumnPrefix.java | 37 +- .../storage/apptoflow/AppToFlowColumn.java | 16 + .../timelineservice/storage/common/Column.java | 17 + .../storage/common/ColumnHelper.java | 16 + .../storage/common/ColumnPrefix.java | 35 + .../common/TimelineEntityFiltersType.java | 71 + .../storage/common/TimelineStorageUtils.java | 461 +++- .../storage/entity/EntityColumn.java | 28 +- .../storage/entity/EntityColumnPrefix.java | 38 +- .../storage/flow/FlowActivityColumnPrefix.java | 35 + .../storage/flow/FlowRunColumn.java | 3 + .../storage/flow/FlowRunColumnPrefix.java | 26 + .../storage/flow/FlowScanner.java | 1 + .../storage/reader/ApplicationEntityReader.java | 426 ++-- .../reader/FlowActivityEntityReader.java | 7 + 
.../storage/reader/FlowRunEntityReader.java | 97 +- .../storage/reader/GenericEntityReader.java | 623 ++--- .../storage/reader/TimelineEntityReader.java | 71 +- .../reader/TestTimelineReaderWebServices.java | 10 +- .../TestFileSystemTimelineReaderImpl.java | 332 ++- .../storage/TestHBaseTimelineStorage.java | 2172 +++++++++++++++++- .../storage/flow/TestFlowDataGenerator.java | 16 + .../storage/flow/TestHBaseStorageFlowRun.java | 267 ++- .../flow/TestHBaseStorageFlowRunCompaction.java | 2 +- .../reader/filter/TimelineExistsFilter.java | 62 + .../reader/filter/TimelineKeyValueFilter.java | 48 + .../reader/filter/TimelineKeyValuesFilter.java | 71 + .../common/TimelineEntityFiltersType.java | 71 + 41 files changed, 5054 insertions(+), 816 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java index 5b2c300..4821d31 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java @@ -18,11 +18,14 @@ package org.apache.hadoop.yarn.server.timelineservice.reader; 
-import java.util.Map; -import java.util.Set; - import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter; /** * Encapsulates information regarding the filters to apply while querying. These @@ -36,36 +39,81 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; * <li><b>createdTimeEnd</b> - Matched entities should not be created after * this timestamp. If null or {@literal <=0}, defaults to * {@link Long#MAX_VALUE}.</li> - * <li><b>relatesTo</b> - Matched entities should relate to given entities. - * If null or empty, the relations are not matched.</li> - * <li><b>isRelatedTo</b> - Matched entities should be related to given - * entities. If null or empty, the relations are not matched.</li> + * <li><b>relatesTo</b> - Matched entities should or should not relate to given + * entities depending on what's specified in the filter. The entities in + * relatesTo are identified by entity type and id. This is represented as + * a {@link TimelineFilterList} object containing + * {@link TimelineKeyValuesFilter} objects, each of which contains a + * set of values for a key and the comparison operator (equals/not equals). The + * key which represents the entity type is a string and values are a set of + * entity identifiers (also string). 
As it is a filter list, relatesTo can be + * evaluated with logical AND/OR and we can create a hierarchy of these + * {@link TimelineKeyValuesFilter} objects. If null or empty, the relations are + * not matched.</li> + * <li><b>isRelatedTo</b> - Matched entities should or should not be related + * to given entities depending on what's specified in the filter. The entities + * in isRelatedTo are identified by entity type and id. This is represented as + * a {@link TimelineFilterList} object containing + * {@link TimelineKeyValuesFilter} objects, each of which contains a + * set of values for a key and the comparison operator (equals/not equals). The + * key which represents the entity type is a string and values are a set of + * entity identifiers (also string). As it is a filter list, relatesTo can be + * evaluated with logical AND/OR and we can create a hierarchy of these + * {@link TimelineKeyValuesFilter} objects. If null or empty, the relations are + * not matched.</li> * <li><b>infoFilters</b> - Matched entities should have exact matches to - * the given info represented as key-value pairs. If null or empty, the - * filter is not applied.</li> + * the given info and should be either equal or not equal to given value + * depending on what's specified in the filter. This is represented as a + * {@link TimelineFilterList} object containing {@link TimelineKeyValueFilter} + * objects, each of which contains key-value pairs with a comparison operator + * (equals/not equals). The key which represents the info key is a string but + * value can be any object. As it is a filter list, info filters can be + * evaluated with logical AND/OR and we can create a hierarchy of these + * key-value pairs. If null or empty, the filter is not applied.</li> * <li><b>configFilters</b> - Matched entities should have exact matches to - * the given configs represented as key-value pairs. 
If null or empty, the - * filter is not applied.</li> + * the given configurations and should be either equal or not equal to given + * value depending on what's specified in the filter. This is represented as a + * {@link TimelineFilterList} object containing {@link TimelineKeyValueFilter} + * objects, each of which contains key-value pairs with a comparison operator + * (equals/not equals). Both key (which represents config name) and value (which + * is config value) are strings. As it is a filter list, config filters can be + * evaluated with logical AND/OR and we can create a hierarchy of these + * {@link TimelineKeyValueFilter} objects. If null or empty, the filter is not + * applied.</li> * <li><b>metricFilters</b> - Matched entities should contain the given - * metrics. If null or empty, the filter is not applied.</li> - * <li><b>eventFilters</b> - Matched entities should contain the given - * events. If null or empty, the filter is not applied.</li> + * metrics and satisfy the specified relation with the value. This is + * represented as a {@link TimelineFilterList} object containing + * {@link TimelineCompareFilter} objects, each of which contains key-value pairs + * along with the specified relational/comparison operator represented by + * {@link TimelineCompareOp}. The key is a string and value is integer + * (Short/Integer/Long). As it is a filter list, metric filters can be evaluated + * with logical AND/OR and we can create a hierarchy of these + * {@link TimelineCompareFilter} objects. If null or empty, the filter is not + * applied.</li> + * <li><b>eventFilters</b> - Matched entities should contain or not contain the + * given events. This is represented as a {@link TimelineFilterList} object + * containing {@link TimelineExistsFilter} objects, each of which contains a + * value which must or must not exist depending on comparison operator specified + * in the filter. For event filters, the value represents a event id. 
As it is a + * filter list, event filters can be evaluated with logical AND/OR and we can + * create a hierarchy of these {@link TimelineExistsFilter} objects. If null or + * empty, the filter is not applied.</li> * </ul> */ @Private @Unstable public class TimelineEntityFilters { - private Long limit; - private Long createdTimeBegin; - private Long createdTimeEnd; - private Map<String, Set<String>> relatesTo; - private Map<String, Set<String>> isRelatedTo; - private Map<String, Object> infoFilters; - private Map<String, String> configFilters; - private Set<String> metricFilters; - private Set<String> eventFilters; - private static final Long DEFAULT_BEGIN_TIME = 0L; - private static final Long DEFAULT_END_TIME = Long.MAX_VALUE; + private long limit; + private long createdTimeBegin; + private long createdTimeEnd; + private TimelineFilterList relatesTo; + private TimelineFilterList isRelatedTo; + private TimelineFilterList infoFilters; + private TimelineFilterList configFilters; + private TimelineFilterList metricFilters; + private TimelineFilterList eventFilters; + private static final long DEFAULT_BEGIN_TIME = 0L; + private static final long DEFAULT_END_TIME = Long.MAX_VALUE; /** * Default limit of number of entities to return for getEntities API. 
@@ -78,23 +126,26 @@ public class TimelineEntityFilters { public TimelineEntityFilters( Long entityLimit, Long timeBegin, Long timeEnd, - Map<String, Set<String>> entityRelatesTo, - Map<String, Set<String>> entityIsRelatedTo, - Map<String, Object> entityInfoFilters, - Map<String, String> entityConfigFilters, - Set<String> entityMetricFilters, - Set<String> entityEventFilters) { - this.limit = entityLimit; - if (this.limit == null || this.limit < 0) { + TimelineFilterList entityRelatesTo, + TimelineFilterList entityIsRelatedTo, + TimelineFilterList entityInfoFilters, + TimelineFilterList entityConfigFilters, + TimelineFilterList entityMetricFilters, + TimelineFilterList entityEventFilters) { + if (entityLimit == null || entityLimit < 0) { this.limit = DEFAULT_LIMIT; + } else { + this.limit = entityLimit; } - this.createdTimeBegin = timeBegin; - if (this.createdTimeBegin == null || this.createdTimeBegin < 0) { + if (timeBegin == null || timeBegin < 0) { this.createdTimeBegin = DEFAULT_BEGIN_TIME; + } else { + this.createdTimeBegin = timeBegin; } - this.createdTimeEnd = timeEnd; - if (this.createdTimeEnd == null || this.createdTimeEnd < 0) { + if (timeEnd == null || timeEnd < 0) { this.createdTimeEnd = DEFAULT_END_TIME; + } else { + this.createdTimeEnd = timeEnd; } this.relatesTo = entityRelatesTo; this.isRelatedTo = entityIsRelatedTo; @@ -104,84 +155,87 @@ public class TimelineEntityFilters { this.eventFilters = entityEventFilters; } - public Long getLimit() { + public long getLimit() { return limit; } public void setLimit(Long entityLimit) { - this.limit = entityLimit; - if (this.limit == null || this.limit < 0) { + if (entityLimit == null || entityLimit < 0) { this.limit = DEFAULT_LIMIT; + } else { + this.limit = entityLimit; } } - public Long getCreatedTimeBegin() { + public long getCreatedTimeBegin() { return createdTimeBegin; } public void setCreatedTimeBegin(Long timeBegin) { - this.createdTimeBegin = timeBegin; - if (this.createdTimeBegin == null || 
this.createdTimeBegin < 0) { + if (timeBegin == null || timeBegin < 0) { this.createdTimeBegin = DEFAULT_BEGIN_TIME; + } else { + this.createdTimeBegin = timeBegin; } } - public Long getCreatedTimeEnd() { + public long getCreatedTimeEnd() { return createdTimeEnd; } public void setCreatedTimeEnd(Long timeEnd) { - this.createdTimeEnd = timeEnd; - if (this.createdTimeEnd == null || this.createdTimeEnd < 0) { + if (timeEnd == null || timeEnd < 0) { this.createdTimeEnd = DEFAULT_END_TIME; + } else { + this.createdTimeEnd = timeEnd; } } - public Map<String, Set<String>> getRelatesTo() { + public TimelineFilterList getRelatesTo() { return relatesTo; } - public void setRelatesTo(Map<String, Set<String>> relations) { + public void setRelatesTo(TimelineFilterList relations) { this.relatesTo = relations; } - public Map<String, Set<String>> getIsRelatedTo() { + public TimelineFilterList getIsRelatedTo() { return isRelatedTo; } - public void setIsRelatedTo(Map<String, Set<String>> relations) { + public void setIsRelatedTo(TimelineFilterList relations) { this.isRelatedTo = relations; } - public Map<String, Object> getInfoFilters() { + public TimelineFilterList getInfoFilters() { return infoFilters; } - public void setInfoFilters(Map<String, Object> filters) { + public void setInfoFilters(TimelineFilterList filters) { this.infoFilters = filters; } - public Map<String, String> getConfigFilters() { + public TimelineFilterList getConfigFilters() { return configFilters; } - public void setConfigFilters(Map<String, String> filters) { + public void setConfigFilters(TimelineFilterList filters) { this.configFilters = filters; } - public Set<String> getMetricFilters() { + public TimelineFilterList getMetricFilters() { return metricFilters; } - public void setMetricFilters(Set<String> filters) { + public void setMetricFilters(TimelineFilterList filters) { this.metricFilters = filters; } - public Set<String> getEventFilters() { + public TimelineFilterList getEventFilters() { return 
eventFilters; } - public void setEventFilters(Set<String> filters) { + public void setEventFilters(TimelineFilterList filters) { this.eventFilters = filters; } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java index d12f7e5..57d75db 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java @@ -20,15 +20,19 @@ package org.apache.hadoop.yarn.server.timelineservice.reader; import java.io.IOException; import java.util.EnumSet; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter; +import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList; +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field; /** @@ -87,7 +91,7 @@ final class TimelineReaderWebServicesUtils { parseKeyStrValuesStr(isRelatedTo, COMMA_DELIMITER, COLON_DELIMITER), parseKeyStrValueObj(infofilters, COMMA_DELIMITER, COLON_DELIMITER), parseKeyStrValueStr(conffilters, COMMA_DELIMITER, COLON_DELIMITER), - parseValuesStr(metricfilters, COMMA_DELIMITER), + parseMetricFilters(metricfilters, COMMA_DELIMITER), parseValuesStr(eventfilters, COMMA_DELIMITER)); } @@ -114,22 +118,26 @@ final class TimelineReaderWebServicesUtils { * @param delimiter string is delimited by this delimiter. * @return set of strings. */ - static Set<String> parseValuesStr(String str, String delimiter) { + static TimelineFilterList parseValuesStr(String str, String delimiter) { if (str == null || str.isEmpty()) { return null; } - Set<String> strSet = new HashSet<String>(); + TimelineFilterList filterList = new TimelineFilterList(); String[] strs = str.split(delimiter); for (String aStr : strs) { - strSet.add(aStr.trim()); + filterList.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, + aStr.trim())); } - return strSet; + return filterList; } - @SuppressWarnings("unchecked") - private static <T> void parseKeyValues(Map<String, T> map, String str, + private static TimelineFilterList parseKeyValues(String str, String pairsDelim, String keyValuesDelim, boolean stringValue, boolean multipleValues) { + if (str == null) { + return null; + } + TimelineFilterList list = new TimelineFilterList(); String[] pairs = str.split(pairsDelim); for (String pair : pairs) { if (pair == null || pair.trim().isEmpty()) { @@ -143,23 +151,28 @@ final class TimelineReaderWebServicesUtils { try { Object 
value = GenericObjectMapper.OBJECT_READER.readValue(pairStrs[1].trim()); - map.put(pairStrs[0].trim(), (T) value); + list.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, + pairStrs[0].trim(), value)); } catch (IOException e) { - map.put(pairStrs[0].trim(), (T) pairStrs[1].trim()); + list.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, + pairStrs[0].trim(), pairStrs[1].trim())); } } else { String key = pairStrs[0].trim(); if (multipleValues) { - Set<String> values = new HashSet<String>(); + Set<Object> values = new HashSet<Object>(); for (int i = 1; i < pairStrs.length; i++) { values.add(pairStrs[i].trim()); } - map.put(key, (T) values); + list.addFilter(new TimelineKeyValuesFilter( + TimelineCompareOp.EQUAL, key, values)); } else { - map.put(key, (T) pairStrs[1].trim()); + list.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, + key, pairStrs[1].trim())); } } } + return list; } /** @@ -175,14 +188,9 @@ final class TimelineReaderWebServicesUtils { * @param keyValuesDelim values for a key are delimited by this delimiter. * @return a map of key-values with each key having a set of values. */ - static Map<String, Set<String>> parseKeyStrValuesStr(String str, - String pairsDelim, String keyValuesDelim) { - if (str == null) { - return null; - } - Map<String, Set<String>> map = new HashMap<String, Set<String>>(); - parseKeyValues(map, str, pairsDelim, keyValuesDelim, true, true); - return map; + static TimelineFilterList parseKeyStrValuesStr(String str, String pairsDelim, + String keyValuesDelim) { + return parseKeyValues(str, pairsDelim, keyValuesDelim, true, true); } /** @@ -195,14 +203,9 @@ final class TimelineReaderWebServicesUtils { * @param keyValDelim key and value are delimited by this delimiter. * @return a map of key-value pairs with both key and value being strings. 
*/ - static Map<String, String> parseKeyStrValueStr(String str, - String pairsDelim, String keyValDelim) { - if (str == null) { - return null; - } - Map<String, String> map = new HashMap<String, String>(); - parseKeyValues(map, str, pairsDelim, keyValDelim, true, false); - return map; + static TimelineFilterList parseKeyStrValueStr(String str, String pairsDelim, + String keyValDelim) { + return parseKeyValues(str, pairsDelim, keyValDelim, true, false); } /** @@ -216,14 +219,9 @@ final class TimelineReaderWebServicesUtils { * @return a map of key-value pairs with key being a string and value, any * object. */ - static Map<String, Object> parseKeyStrValueObj(String str, - String pairsDelim, String keyValDelim) { - if (str == null) { - return null; - } - Map<String, Object> map = new HashMap<String, Object>(); - parseKeyValues(map, str, pairsDelim, keyValDelim, false, false); - return map; + static TimelineFilterList parseKeyStrValueObj(String str, String pairsDelim, + String keyValDelim) { + return parseKeyValues(str, pairsDelim, keyValDelim, false, false); } /** @@ -247,6 +245,20 @@ final class TimelineReaderWebServicesUtils { return fieldList; } + static TimelineFilterList parseMetricFilters(String str, + String delimiter) { + if (str == null || str.isEmpty()) { + return null; + } + TimelineFilterList list = new TimelineFilterList(); + String[] strs = str.split(delimiter); + for (String aStr : strs) { + list.addFilter(new TimelineCompareFilter( + TimelineCompareOp.GREATER_OR_EQUAL, aStr.trim(), 0L)); + } + return list; + } + /** * Interpret passed string as a long. * @param str Passed string. 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineCompareFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineCompareFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineCompareFilter.java index 14e7124..81902ee 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineCompareFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineCompareFilter.java @@ -29,17 +29,27 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; @Unstable public class TimelineCompareFilter extends TimelineFilter { - private TimelineCompareOp compareOp; - private String key; - private Object value; + private final TimelineCompareOp compareOp; + private final String key; + private final Object value; + // If comparison operator is NOT_EQUAL, this flag decides if we should return + // the entity if key does not exist. 
+ private final boolean keyMustExist; - public TimelineCompareFilter() { - } - - public TimelineCompareFilter(TimelineCompareOp op, String key, Object val) { + public TimelineCompareFilter(TimelineCompareOp op, String key, Object val, + boolean keyMustExistFlag) { this.compareOp = op; this.key = key; this.value = val; + if (op == TimelineCompareOp.NOT_EQUAL) { + this.keyMustExist = keyMustExistFlag; + } else { + this.keyMustExist = true; + } + } + + public TimelineCompareFilter(TimelineCompareOp op, String key, Object val) { + this(op, key, val, true); } @Override @@ -58,4 +68,15 @@ public class TimelineCompareFilter extends TimelineFilter { public Object getValue() { return value; } + + public boolean getKeyMustExist() { + return keyMustExist; + } + + @Override + public String toString() { + return String.format("%s (%s, %s:%s:%b)", + this.getClass().getSimpleName(), this.compareOp.name(), + this.key, this.value, this.keyMustExist); + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java new file mode 100644 index 0000000..36d0d7b --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.reader.filter; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * Filter class which represents filter to be applied based on existence of a + * value. 
+ */ +@Private +@Unstable +public class TimelineExistsFilter extends TimelineFilter { + + private final TimelineCompareOp compareOp; + private final String value; + + public TimelineExistsFilter(TimelineCompareOp op, String value) { + this.value = value; + if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) { + throw new IllegalArgumentException("CompareOp for exists filter should " + + "be EQUAL or NOT_EQUAL"); + } + this.compareOp = op; + } + + @Override + public TimelineFilterType getFilterType() { + return TimelineFilterType.EXISTS; + } + + public String getValue() { + return value; + } + + public TimelineCompareOp getCompareOp() { + return compareOp; + } + + @Override + public String toString() { + return String.format("%s (%s %s)", + this.getClass().getSimpleName(), this.compareOp.name(), this.value); + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilter.java index d4b4045..5e84976 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilter.java @@ -39,13 +39,25 @@ public abstract class TimelineFilter { */ 
LIST, /** - * Filter which is used for comparison. + * Filter which is used for key-value comparison. */ COMPARE, /** + * Filter which is used for checking key-value equality. + */ + KEY_VALUE, + /** + * Filter which is used for checking key-multiple values equality. + */ + KEY_VALUES, + /** * Filter which matches prefix for a config or a metric. */ - PREFIX + PREFIX, + /** + * Filter which checks existence of a value. + */ + EXISTS } public abstract TimelineFilterType getFilterType(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterList.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterList.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterList.java index 8727bd7..57b41a6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterList.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterList.java @@ -53,6 +53,14 @@ public class TimelineFilterList extends TimelineFilter { this(Operator.AND, filters); } + public TimelineFilterList() { + this(Operator.AND); + } + + public TimelineFilterList(Operator op) { + this.operator = op; + } + public TimelineFilterList(Operator op, TimelineFilter...filters) { this.operator = op; this.filterList = new ArrayList<TimelineFilter>(Arrays.asList(filters)); @@ 
-88,4 +96,10 @@ public class TimelineFilterList extends TimelineFilter { public void addFilter(TimelineFilter filter) { filterList.add(filter); } + + @Override + public String toString() { + return String.format("TimelineFilterList %s (%d): %s", + this.operator, this.filterList.size(), this.filterList.toString()); + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java index f902500..8cae410 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java @@ -18,25 +18,40 @@ package org.apache.hadoop.yarn.server.timelineservice.reader.filter; +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.FamilyFilter; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import 
org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.FilterList.Operator; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; + import org.apache.hadoop.hbase.filter.QualifierFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; /** * Set of utility methods used by timeline filter classes. */ public final class TimelineFilterUtils { + private static final Log LOG = LogFactory.getLog(TimelineFilterUtils.class); + private TimelineFilterUtils() { } /** * Returns the equivalent HBase filter list's {@link Operator}. - * @param op + * + * @param op timeline filter list operator. * @return HBase filter list's Operator. */ private static Operator getHBaseOperator(TimelineFilterList.Operator op) { @@ -52,7 +67,8 @@ public final class TimelineFilterUtils { /** * Returns the equivalent HBase compare filter's {@link CompareOp}. - * @param op + * + * @param op timeline compare op. * @return HBase compare filter's CompareOp. */ private static CompareOp getHBaseCompareOp( @@ -90,6 +106,159 @@ public final class TimelineFilterUtils { } /** + * Create a HBase {@link QualifierFilter} for the passed column prefix and + * compare op. + * + * @param <T> Describes the type of column prefix. + * @param compareOp compare op. + * @param columnPrefix column prefix. + * @return a column qualifier filter. + */ + public static <T> Filter createHBaseQualifierFilter(CompareOp compareOp, + ColumnPrefix<T> columnPrefix) { + return new QualifierFilter(compareOp, + new BinaryPrefixComparator( + columnPrefix.getColumnPrefixBytes(""))); + } + + /** + * Create filters for confs or metrics to retrieve. This list includes a + * configs/metrics family filter and relevant filters for confs/metrics to + * retrieve, if present. 
+ * + * @param <T> Describes the type of column prefix. + * @param confsOrMetricToRetrieve configs/metrics to retrieve. + * @param columnFamily config or metric column family. + * @param columnPrefix config or metric column prefix. + * @return a filter list. + * @throws IOException if any problem occurs while creating the filters. + */ + public static <T> Filter createFilterForConfsOrMetricsToRetrieve( + TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily, + ColumnPrefix<T> columnPrefix) throws IOException { + Filter familyFilter = new FamilyFilter(CompareOp.EQUAL, + new BinaryComparator(columnFamily.getBytes())); + if (confsOrMetricToRetrieve != null && + !confsOrMetricToRetrieve.getFilterList().isEmpty()) { + // If confsOrMetricsToRetrieve are specified, create a filter list based + // on it and family filter. + FilterList filter = new FilterList(familyFilter); + filter.addFilter( + createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve)); + return filter; + } else { + // Only the family filter needs to be added. + return familyFilter; + } + } + + /** + * Create 2 HBase {@link SingleColumnValueFilter} filters for the specified + * value range represented by start and end value and wrap them inside a + * filter list. Start and end value should not be null. + * + * @param <T> Describes the type of column prefix. + * @param column Column for which single column value filter is to be created. + * @param startValue Start value. + * @param endValue End value. + * @return 2 single column value filters wrapped in a filter list. + * @throws IOException if any problem is encountered while encoding value.
+ */ + public static <T> FilterList createSingleColValueFiltersByRange( + Column<T> column, Object startValue, Object endValue) throws IOException { + FilterList list = new FilterList(); + Filter singleColValFilterStart = createHBaseSingleColValueFilter( + column.getColumnFamilyBytes(), column.getColumnQualifierBytes(), + column.getValueConverter().encodeValue(startValue), + CompareOp.GREATER_OR_EQUAL, true); + list.addFilter(singleColValFilterStart); + + Filter singleColValFilterEnd = createHBaseSingleColValueFilter( + column.getColumnFamilyBytes(), column.getColumnQualifierBytes(), + column.getValueConverter().encodeValue(endValue), + CompareOp.LESS_OR_EQUAL, true); + list.addFilter(singleColValFilterEnd); + return list; + } + + /** + * Creates a HBase {@link SingleColumnValueFilter}. + * + * @param columnFamily Column Family represented as bytes. + * @param columnQualifier Column Qualifier represented as bytes. + * @param value Value. + * @param compareOp Compare operator. + * @param filterIfMissing This flag decides if we should filter the row if the + * specified column is missing. This is based on the filter's keyMustExist + * field. + * @return a {@link SingleColumnValueFilter} object + * @throws IOException if any problem occurs while creating the filter.
+ */ + public static <T> FilterList createFiltersFromColumnQualifiers( + ColumnPrefix<T> colPrefix, Set<String> columns) { + FilterList list = new FilterList(Operator.MUST_PASS_ONE); + for (String column : columns) { + // For columns which have compound column qualifiers (eg. events), we need + // to include the required separator. + byte[] compoundColQual = + colPrefix.getCompoundColQualBytes(column, (byte[])null); + list.addFilter(new QualifierFilter(CompareOp.EQUAL, + new BinaryPrefixComparator( + colPrefix.getColumnPrefixBytes(compoundColQual)))); + } + return list; + } + + /** + * Fetch columns from filter list containing exists and multivalue equality + * filters. This is done to fetch only required columns from back-end and + * then match event filters or relationships in reader. + * + * @param filterList filter list. + * @return set of columns. + */ + public static Set<String> fetchColumnsFromFilterList( + TimelineFilterList filterList) { + Set<String> strSet = new HashSet<String>(); + for (TimelineFilter filter : filterList.getFilterList()) { + switch(filter.getFilterType()) { + case LIST: + strSet.addAll(fetchColumnsFromFilterList((TimelineFilterList)filter)); + break; + case KEY_VALUES: + strSet.add(((TimelineKeyValuesFilter)filter).getKey()); + break; + case EXISTS: + strSet.add(((TimelineExistsFilter)filter).getValue()); + break; + default: + LOG.info("Unexpected filter type " + filter.getFilterType()); + break; + } + } + return strSet; + } + + /** * Creates equivalent HBase {@link FilterList} from {@link TimelineFilterList} * while converting different timeline filters(of type {@link TimelineFilter}) * into their equivalent HBase filters. @@ -98,22 +267,45 @@ public final class TimelineFilterUtils { * @param colPrefix column prefix which will be used for conversion. * @param filterList timeline filter list which has to be converted. * @return A {@link FilterList} object. + * @throws IOException if any problem occurs while creating the filter list. 
*/ public static <T> FilterList createHBaseFilterList(ColumnPrefix<T> colPrefix, - TimelineFilterList filterList) { + TimelineFilterList filterList) throws IOException { FilterList list = new FilterList(getHBaseOperator(filterList.getOperator())); for (TimelineFilter filter : filterList.getFilterList()) { switch(filter.getFilterType()) { case LIST: - list.addFilter( - createHBaseFilterList(colPrefix, (TimelineFilterList)filter)); + list.addFilter(createHBaseFilterList(colPrefix, + (TimelineFilterList)filter)); break; case PREFIX: - list.addFilter(createHBaseColQualPrefixFilter( - colPrefix, (TimelinePrefixFilter)filter)); + list.addFilter(createHBaseColQualPrefixFilter(colPrefix, + (TimelinePrefixFilter)filter)); + break; + case COMPARE: + TimelineCompareFilter compareFilter = (TimelineCompareFilter)filter; + list.addFilter( + createHBaseSingleColValueFilter( + colPrefix.getColumnFamilyBytes(), + colPrefix.getColumnPrefixBytes(compareFilter.getKey()), + colPrefix.getValueConverter(). + encodeValue(compareFilter.getValue()), + getHBaseCompareOp(compareFilter.getCompareOp()), + compareFilter.getKeyMustExist())); + break; + case KEY_VALUE: + TimelineKeyValueFilter kvFilter = (TimelineKeyValueFilter)filter; + list.addFilter( + createHBaseSingleColValueFilter( + colPrefix.getColumnFamilyBytes(), + colPrefix.getColumnPrefixBytes(kvFilter.getKey()), + colPrefix.getValueConverter().encodeValue(kvFilter.getValue()), + getHBaseCompareOp(kvFilter.getCompareOp()), + kvFilter.getKeyMustExist())); break; default: + LOG.info("Unexpected filter type " + filter.getFilterType()); break; } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java ---------------------------------------------------------------------- diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java new file mode 100644 index 0000000..58f0ee9 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.reader.filter; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * Filter class which represents filter to be applied based on key-value pair + * being equal or not to the values in back-end store. 
+ */ +@Private +@Unstable +public class TimelineKeyValueFilter extends TimelineCompareFilter { + public TimelineKeyValueFilter(TimelineCompareOp op, String key, Object val, + boolean keyMustExistFlag) { + super(op, key, val, keyMustExistFlag); + if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) { + throw new IllegalArgumentException("TimelineCompareOp for equality" + + " filter should be EQUAL or NOT_EQUAL"); + } + } + + public TimelineKeyValueFilter(TimelineCompareOp op, String key, Object val) { + this(op, key, val, true); + } + + @Override + public TimelineFilterType getFilterType() { + return TimelineFilterType.KEY_VALUE; + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValuesFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValuesFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValuesFilter.java new file mode 100644 index 0000000..0d34d47 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValuesFilter.java @@ -0,0 +1,71 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timelineservice.reader.filter; + +import java.util.Set; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * Filter class which represents filter to be applied based on multiple values + * for a key and these values being equal or not equal to values in back-end + * store. 
+ */ +@Private +@Unstable +public class TimelineKeyValuesFilter extends TimelineFilter { + private final TimelineCompareOp compareOp; + private final String key; + private final Set<Object> values; + public TimelineKeyValuesFilter(TimelineCompareOp op, String key, + Set<Object> values) { + if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) { + throw new IllegalArgumentException("TimelineCompareOp for multi value " + + "equality filter should be EQUAL or NOT_EQUAL"); + } + this.compareOp = op; + this.key = key; + this.values = values; + } + + @Override + public TimelineFilterType getFilterType() { + return TimelineFilterType.KEY_VALUES; + } + + public String getKey() { + return key; + } + + public Set<Object> getValues() { + return values; + } + + public TimelineCompareOp getCompareOp() { + return compareOp; + } + + @Override + public String toString() { + return String.format("%s (%s, %s:%s)", + this.getClass().getSimpleName(), this.compareOp.name(), + this.key, (values == null) ? 
"" : values.toString()); + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelinePrefixFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelinePrefixFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelinePrefixFilter.java index 6233f26..f36e593 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelinePrefixFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelinePrefixFilter.java @@ -53,4 +53,10 @@ public class TimelinePrefixFilter extends TimelineFilter { public TimelineCompareOp getCompareOp() { return compareOp; } + + @Override + public String toString() { + return String.format("%s (%s %s)", + this.getClass().getSimpleName(), this.compareOp.name(), this.prefix); + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java index 97e05dd..bdddd7e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java @@ -296,39 +296,39 @@ public class FileSystemTimelineReaderImpl extends AbstractService continue; } if (filters.getRelatesTo() != null && - !filters.getRelatesTo().isEmpty() && - !TimelineStorageUtils.matchRelations( - entity.getRelatesToEntities(), filters.getRelatesTo())) { + !filters.getRelatesTo().getFilterList().isEmpty() && + !TimelineStorageUtils.matchRelatesTo(entity, + filters.getRelatesTo())) { continue; } if (filters.getIsRelatedTo() != null && - !filters.getIsRelatedTo().isEmpty() && - !TimelineStorageUtils.matchRelations( - entity.getIsRelatedToEntities(), filters.getIsRelatedTo())) { + !filters.getIsRelatedTo().getFilterList().isEmpty() && + !TimelineStorageUtils.matchIsRelatedTo(entity, + filters.getIsRelatedTo())) { continue; } if (filters.getInfoFilters() != null && - !filters.getInfoFilters().isEmpty() && - !TimelineStorageUtils.matchFilters( - entity.getInfo(), filters.getInfoFilters())) { + !filters.getInfoFilters().getFilterList().isEmpty() && + !TimelineStorageUtils.matchInfoFilters(entity, + filters.getInfoFilters())) { continue; } if (filters.getConfigFilters() != null && - !filters.getConfigFilters().isEmpty() && - !TimelineStorageUtils.matchFilters( - entity.getConfigs(), filters.getConfigFilters())) { + !filters.getConfigFilters().getFilterList().isEmpty() && + !TimelineStorageUtils.matchConfigFilters(entity, + filters.getConfigFilters())) { 
continue; } if (filters.getMetricFilters() != null && - !filters.getMetricFilters().isEmpty() && - !TimelineStorageUtils.matchMetricFilters( - entity.getMetrics(), filters.getMetricFilters())) { + !filters.getMetricFilters().getFilterList().isEmpty() && + !TimelineStorageUtils.matchMetricFilters(entity, + filters.getMetricFilters())) { continue; } if (filters.getEventFilters() != null && - !filters.getEventFilters().isEmpty() && - !TimelineStorageUtils.matchEventFilters( - entity.getEvents(), filters.getEventFilters())) { + !filters.getEventFilters().getFilterList().isEmpty() && + !TimelineStorageUtils.matchEventFilters(entity, + filters.getEventFilters())) { continue; } TimelineEntity entityToBeReturned = createEntityToBeReturned( http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java index b75007d..172f982 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java @@ -407,36 +407,39 @@ public class HBaseTimelineWriterImpl extends AbstractService implements "! 
Using the current timestamp"); eventTimestamp = System.currentTimeMillis(); } - byte[] columnQualifierFirst = - Bytes.toBytes(Separator.VALUES.encode(eventId)); - byte[] columnQualifierWithTsBytes = Separator.VALUES. - join(columnQualifierFirst, Bytes.toBytes( - TimelineStorageUtils.invertLong(eventTimestamp))); + byte[] eventTs = + Bytes.toBytes(TimelineStorageUtils.invertLong(eventTimestamp)); Map<String, Object> eventInfo = event.getInfo(); if ((eventInfo == null) || (eventInfo.size() == 0)) { - // add separator since event key is empty - byte[] compoundColumnQualifierBytes = - Separator.VALUES.join(columnQualifierWithTsBytes, - null); if (isApplication) { + byte[] compoundColumnQualifierBytes = + ApplicationColumnPrefix.EVENT. + getCompoundColQualBytes(eventId, eventTs, null); ApplicationColumnPrefix.EVENT.store(rowKey, applicationTable, compoundColumnQualifierBytes, null, - TimelineStorageUtils.EMPTY_BYTES); + TimelineStorageUtils.EMPTY_BYTES); } else { + byte[] compoundColumnQualifierBytes = + EntityColumnPrefix.EVENT. + getCompoundColQualBytes(eventId, eventTs, null); EntityColumnPrefix.EVENT.store(rowKey, entityTable, compoundColumnQualifierBytes, null, - TimelineStorageUtils.EMPTY_BYTES); + TimelineStorageUtils.EMPTY_BYTES); } } else { for (Map.Entry<String, Object> info : eventInfo.entrySet()) { // eventId?infoKey - byte[] compoundColumnQualifierBytes = - Separator.VALUES.join(columnQualifierWithTsBytes, - Bytes.toBytes(info.getKey())); + byte[] infoKey = Bytes.toBytes(info.getKey()); if (isApplication) { + byte[] compoundColumnQualifierBytes = + ApplicationColumnPrefix.EVENT. + getCompoundColQualBytes(eventId, eventTs, infoKey); ApplicationColumnPrefix.EVENT.store(rowKey, applicationTable, compoundColumnQualifierBytes, null, info.getValue()); } else { + byte[] compoundColumnQualifierBytes = + EntityColumnPrefix.EVENT. 
+ getCompoundColQualBytes(eventId, eventTs, infoKey); EntityColumnPrefix.EVENT.store(rowKey, entityTable, compoundColumnQualifierBytes, null, info.getValue()); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java index 5734389..80fcf8c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java @@ -24,8 +24,11 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; +import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; /** @@ -41,7 +44,8 @@ public enum ApplicationColumn implements Column<ApplicationTable> { /** * When the application was created. */ - CREATED_TIME(ApplicationColumnFamily.INFO, "created_time"), + CREATED_TIME(ApplicationColumnFamily.INFO, "created_time", + LongConverter.getInstance()), /** * The version of the flow that this app belongs to. @@ -55,12 +59,17 @@ public enum ApplicationColumn implements Column<ApplicationTable> { private ApplicationColumn(ColumnFamily<ApplicationTable> columnFamily, String columnQualifier) { + this(columnFamily, columnQualifier, GenericConverter.getInstance()); + } + + private ApplicationColumn(ColumnFamily<ApplicationTable> columnFamily, + String columnQualifier, ValueConverter converter) { this.columnFamily = columnFamily; this.columnQualifier = columnQualifier; // Future-proof by ensuring the right column prefix hygiene. this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE.encode(columnQualifier)); - this.column = new ColumnHelper<ApplicationTable>(columnFamily); + this.column = new ColumnHelper<ApplicationTable>(columnFamily, converter); } /** @@ -81,6 +90,21 @@ public enum ApplicationColumn implements Column<ApplicationTable> { return column.readResult(result, columnQualifierBytes); } + @Override + public byte[] getColumnQualifierBytes() { + return columnQualifierBytes.clone(); + } + + @Override + public byte[] getColumnFamilyBytes() { + return columnFamily.getBytes(); + } + + @Override + public ValueConverter getValueConverter() { + return column.getValueConverter(); + } + /** * Retrieve an {@link ApplicationColumn} given a name, or null if there is no * match. 
The following holds true: {@code columnFor(x) == columnFor(y)} if http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java index 9120f3d..1dfc4db 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java @@ -56,7 +56,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> { /** * Lifecycle events for an application. */ - EVENT(ApplicationColumnFamily.INFO, "e"), + EVENT(ApplicationColumnFamily.INFO, "e", true), /** * Config column stores configuration with config key as the column name. @@ -78,6 +78,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> { */ private final String columnPrefix; private final byte[] columnPrefixBytes; + private final boolean compoundColQual; /** * Private constructor, meant to be used by the enum definition. 
@@ -87,7 +88,18 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> { */ private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily, String columnPrefix) { - this(columnFamily, columnPrefix, GenericConverter.getInstance()); + this(columnFamily, columnPrefix, false, GenericConverter.getInstance()); + } + + private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily, + String columnPrefix, boolean compoundColQual) { + this(columnFamily, columnPrefix, compoundColQual, + GenericConverter.getInstance()); + } + + private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily, + String columnPrefix, ValueConverter converter) { + this(columnFamily, columnPrefix, false, converter); } /** @@ -99,7 +111,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> { * this column prefix. */ private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily, - String columnPrefix, ValueConverter converter) { + String columnPrefix, boolean compoundColQual, ValueConverter converter) { column = new ColumnHelper<ApplicationTable>(columnFamily, converter); this.columnFamily = columnFamily; this.columnPrefix = columnPrefix; @@ -110,6 +122,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> { this.columnPrefixBytes = Bytes.toBytes(Separator.SPACE.encode(columnPrefix)); } + this.compoundColQual = compoundColQual; } /** @@ -131,6 +144,20 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> { this.columnPrefixBytes, qualifierPrefix); } + @Override + public byte[] getColumnFamilyBytes() { + return columnFamily.getBytes(); + } + + @Override + public byte[] getCompoundColQualBytes(String qualifier, + byte[]...components) { + if (!compoundColQual) { + return ColumnHelper.getColumnQualifier(null, qualifier); + } + return ColumnHelper.getCompoundColumnQualifierBytes(qualifier, components); + } + /* * (non-Javadoc) * @@ -196,6 
+223,10 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> { return column.readResult(result, columnQualifier); } + public ValueConverter getValueConverter() { + return column.getValueConverter(); + } + /* * (non-Javadoc) * http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java index a8e1c66..ff61633 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java @@ -25,6 +25,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; +import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute; import java.io.IOException; @@ -71,6 +72,11 @@ public enum AppToFlowColumn implements Column<AppToFlowTable> { return 
columnQualifier; } + @Override + public byte[] getColumnQualifierBytes() { + return columnQualifierBytes.clone(); + } + public void store(byte[] rowKey, TypedBufferedMutator<AppToFlowTable> tableMutator, Long timestamp, Object inputValue, Attribute... attributes) throws IOException { @@ -78,6 +84,16 @@ public enum AppToFlowColumn implements Column<AppToFlowTable> { inputValue, attributes); } + @Override + public byte[] getColumnFamilyBytes() { + return columnFamily.getBytes(); + } + + @Override + public ValueConverter getValueConverter() { + return column.getValueConverter(); + } + public Object readResult(Result result) throws IOException { return column.readResult(result, columnQualifierBytes); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java index 1f0b48f..90f2de4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java @@ -60,4 +60,21 @@ public interface Column<T> { */ Object readResult(Result result) throws IOException; + /** + * Returns column family name(as bytes) associated with this column. 
+ * @return a byte array encoding column family for this column qualifier. + */ + byte[] getColumnFamilyBytes(); + + /** + * Get byte representation for this column qualifier. + * @return a byte array representing column qualifier. + */ + byte[] getColumnQualifierBytes(); + + /** + * Returns value converter implementation associated with this column. + * @return a {@link ValueConverter} implementation. + */ + ValueConverter getValueConverter(); } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java index 15bb818..4adb413 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java @@ -363,6 +363,22 @@ public class ColumnHelper<T> { } /** + * Create a compound column qualifier by combining qualifier and components. + * + * @param qualifier Column QUalifier. + * @param components Other components. + * @return a byte array representing compound column qualifier. 
+ */ + public static byte[] getCompoundColumnQualifierBytes(String qualifier, + byte[]...components) { + byte[] colQualBytes = Bytes.toBytes(Separator.VALUES.encode(qualifier)); + for (int i = 0; i < components.length; i++) { + colQualBytes = Separator.VALUES.join(colQualBytes, components[i]); + } + return colQualBytes; + } + + /** * @param columnPrefixBytes The byte representation for the column prefix. * Should not contain {@link Separator#QUALIFIERS}. * @param qualifier for the remainder of the column. http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java index f221b31..e4b7f16 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java @@ -112,6 +112,18 @@ public interface ColumnPrefix<T> { readResultsWithTimestamps(Result result) throws IOException; /** + * @param result from which to read columns + * @return the latest values of columns in the column family. The column + * qualifier is returned as a list of parts, each part a byte[]. This + * is to facilitate returning byte arrays of values that were not + * Strings. 
If they can be treated as Strings, you should use + * {@link #readResults(Result)} instead. + * @throws IOException if any problem occurs while reading results. + */ + Map<?, Object> readResultsHavingCompoundColumnQualifiers(Result result) + throws IOException; + + /** * @param qualifierPrefix Column qualifier or prefix of qualifier. * @return a byte array encoding column prefix and qualifier/prefix passed. */ @@ -122,4 +134,27 @@ public interface ColumnPrefix<T> { * @return a byte array encoding column prefix and qualifier/prefix passed. */ byte[] getColumnPrefixBytes(byte[] qualifierPrefix); + + /** + * Returns column family name(as bytes) associated with this column prefix. + * @return a byte array encoding column family for this prefix. + */ + byte[] getColumnFamilyBytes(); + + /** + * Returns value converter implementation associated with this column prefix. + * @return a {@link ValueConverter} implementation. + */ + ValueConverter getValueConverter(); + + /** + * Get compound column qualifier bytes if the column qualifier is a compound + * qualifier. Returns the qualifier passed as bytes if the column is not a + * compound column qualifier. + * + * @param qualifier Column Qualifier. + * @param components Other components. + * @return byte array representing compound column qualifier. 
+ */ + byte[] getCompoundColQualBytes(String qualifier, byte[]...components); } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineEntityFiltersType.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineEntityFiltersType.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineEntityFiltersType.java new file mode 100644 index 0000000..4099e92 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineEntityFiltersType.java @@ -0,0 +1,71 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter.TimelineFilterType; + +/** + * Used to define which filter to match. + */ +enum TimelineEntityFiltersType { + CONFIG { + boolean isValidFilter(TimelineFilterType filterType) { + return filterType == TimelineFilterType.LIST || + filterType == TimelineFilterType.KEY_VALUE; + } + }, + INFO { + boolean isValidFilter(TimelineFilterType filterType) { + return filterType == TimelineFilterType.LIST || + filterType == TimelineFilterType.KEY_VALUE; + } + }, + METRIC { + boolean isValidFilter(TimelineFilterType filterType) { + return filterType == TimelineFilterType.LIST || + filterType == TimelineFilterType.COMPARE; + } + }, + EVENT { + boolean isValidFilter(TimelineFilterType filterType) { + return filterType == TimelineFilterType.LIST || + filterType == TimelineFilterType.EXISTS; + } + }, + IS_RELATED_TO { + boolean isValidFilter(TimelineFilterType filterType) { + return filterType == TimelineFilterType.LIST || + filterType == TimelineFilterType.KEY_VALUES; + } + }, + RELATES_TO { + boolean isValidFilter(TimelineFilterType filterType) { + return filterType == TimelineFilterType.LIST || + filterType == TimelineFilterType.KEY_VALUES; + } + }; + + /** + * Checks whether filter type is valid for the filter being matched. + * + * @param filterType filter type. + * @return true, if its a valid filter, false otherwise. + */ + abstract boolean isValidFilter(TimelineFilterType filterType); +} \ No newline at end of file --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org