diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index ae0c8ca2d53..8a418d44ea4 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -203,6 +203,9 @@ Release 2.4.0 - UNRELEASED YARN-1470. Add audience annotations to MiniYARNCluster. (Anubhav Dhoot via kasha) + YARN-1732. Changed types of related-entities and primary-filters in the + timeline-service to be sets instead of maps. (Billie Rinaldi via vinodkv) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java index 709c79568e4..e7907d8b0db 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSEntity.java @@ -20,9 +20,11 @@ package org.apache.hadoop.yarn.api.records.apptimeline; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -56,10 +58,10 @@ public class ATSEntity implements Comparable { private String entityId; private Long startTime; private List events = new ArrayList(); - private Map> relatedEntities = - new HashMap>(); - private Map primaryFilters = - new HashMap(); + private Map> relatedEntities = + new HashMap>(); + private Map> primaryFilters = + new HashMap>(); private Map otherInfo = new HashMap(); @@ -173,7 +175,7 @@ public class ATSEntity implements Comparable { * @return the related entities */ @XmlElement(name = "relatedentities") - public Map> getRelatedEntities() { + public Map> getRelatedEntities() { return relatedEntities; } @@ -186,9 +188,9 @@ public class ATSEntity implements Comparable { * the entity Id */ public void addRelatedEntity(String entityType, String entityId) { - List thisRelatedEntity = relatedEntities.get(entityType); + Set thisRelatedEntity = relatedEntities.get(entityType); if (thisRelatedEntity == null) { - thisRelatedEntity = new ArrayList(); + thisRelatedEntity = new HashSet(); relatedEntities.put(entityType, thisRelatedEntity); } thisRelatedEntity.add(entityId); @@ -200,10 +202,10 @@ public class ATSEntity implements Comparable { * @param relatedEntities * a map of related entities */ - public void addRelatedEntities(Map> relatedEntities) { - for (Entry> relatedEntity : + public void addRelatedEntities(Map> relatedEntities) { + for (Entry> relatedEntity : relatedEntities.entrySet()) { - List thisRelatedEntity = + Set thisRelatedEntity = this.relatedEntities.get(relatedEntity.getKey()); if (thisRelatedEntity == null) { this.relatedEntities.put( @@ -221,7 +223,7 @@ public class ATSEntity implements Comparable { * a map of related entities */ public void setRelatedEntities( - Map> relatedEntities) { + Map> relatedEntities) { this.relatedEntities = relatedEntities; } @@ -231,7 +233,7 @@ public class ATSEntity implements Comparable { * @return the primary filters */ @XmlElement(name = "primaryfilters") - public Map getPrimaryFilters() { + public Map> getPrimaryFilters() { return primaryFilters; } @@ -244,7 +246,12 @@ public class ATSEntity implements Comparable { * the primary filter value 
*/ public void addPrimaryFilter(String key, Object value) { - primaryFilters.put(key, value); + Set thisPrimaryFilter = primaryFilters.get(key); + if (thisPrimaryFilter == null) { + thisPrimaryFilter = new HashSet(); + primaryFilters.put(key, thisPrimaryFilter); + } + thisPrimaryFilter.add(value); } /** @@ -253,8 +260,18 @@ public class ATSEntity implements Comparable { * @param primaryFilters * a map of primary filters */ - public void addPrimaryFilters(Map primaryFilters) { - this.primaryFilters.putAll(primaryFilters); + public void addPrimaryFilters(Map> primaryFilters) { + for (Entry> primaryFilter : + primaryFilters.entrySet()) { + Set thisPrimaryFilter = + this.primaryFilters.get(primaryFilter.getKey()); + if (thisPrimaryFilter == null) { + this.primaryFilters.put( + primaryFilter.getKey(), primaryFilter.getValue()); + } else { + thisPrimaryFilter.addAll(primaryFilter.getValue()); + } + } } /** @@ -263,7 +280,7 @@ public class ATSEntity implements Comparable { * @param primaryFilters * a map of primary filters */ - public void setPrimaryFilters(Map primaryFilters) { + public void setPrimaryFilters(Map> primaryFilters) { this.primaryFilters = primaryFilters; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java index 38ceb30c7d4..3281a328446 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java @@ -17,11 +17,6 @@ */ package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.io.WritableUtils; -import org.codehaus.jackson.map.ObjectMapper; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -31,7 +26,11 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.io.WritableUtils; +import org.codehaus.jackson.map.ObjectMapper; /** * A utility class providing methods for serializing and deserializing @@ -132,9 +131,24 @@ public class GenericObjectMapper { * @throws IOException */ public static Object read(byte[] b) throws IOException { - if (b == null || b.length == 0) + return read(b, 0); + } + + /** + * Deserializes an Object from a byte array at a specified offset, assuming + * the bytes were created with {@link #write(Object)}. 
+ * + * @param b A byte array + * @param offset Offset into the array + * @return An Object + * @throws IOException + */ + public static Object read(byte[] b, int offset) throws IOException { + if (b == null || b.length == 0) { return null; - ByteArrayInputStream bais = new ByteArrayInputStream(b); + } + ByteArrayInputStream bais = new ByteArrayInputStream(b, offset, + b.length - offset); return read(new DataInputStream(bais)); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java index c2e93cab948..7f4d838286b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java @@ -259,14 +259,12 @@ public class LeveldbApplicationTimelineStore extends AbstractService boolean relatedEntities = false; if (fields.contains(Field.RELATED_ENTITIES)) { relatedEntities = true; - atsEntity.setRelatedEntities(new HashMap>()); } else { atsEntity.setRelatedEntities(null); } boolean primaryFilters = false; if (fields.contains(Field.PRIMARY_FILTERS)) { primaryFilters = true; - atsEntity.setPrimaryFilters(new HashMap()); } else { atsEntity.setPrimaryFilters(null); } @@ -286,9 +284,8 @@ public class LeveldbApplicationTimelineStore extends AbstractService break; if (key[prefixlen] == PRIMARY_FILTER_COLUMN[0]) { if (primaryFilters) { - atsEntity.addPrimaryFilter(parseRemainingKey(key, - prefixlen + PRIMARY_FILTER_COLUMN.length), - GenericObjectMapper.read(iterator.peekNext().getValue())); + addPrimaryFilter(atsEntity, key, + prefixlen + PRIMARY_FILTER_COLUMN.length); } } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) { if (otherInfo) { @@ -507,9 +504,14 @@ public class LeveldbApplicationTimelineStore extends AbstractService if (secondaryFilters != null) { for (NameValuePair filter : secondaryFilters) { Object v = atsEntity.getOtherInfo().get(filter.getName()); - if (v == null) - v = atsEntity.getPrimaryFilters().get(filter.getName()); - if (v == null || !v.equals(filter.getValue())) { + if (v == null) { + Set vs = atsEntity.getPrimaryFilters() + .get(filter.getName()); + if (vs != null && !vs.contains(filter.getValue())) { + filterPassed = false; + break; + } + } else if (!v.equals(filter.getValue())) { filterPassed = false; break; } @@ -547,7 +549,7 @@ public class LeveldbApplicationTimelineStore extends AbstractService return; } Long revStartTimeLong = readReverseOrderedLong(revStartTime, 0); - Map primaryFilters = atsEntity.getPrimaryFilters(); + Map> primaryFilters = atsEntity.getPrimaryFilters(); // write event entries if (events != null && !events.isEmpty()) { @@ -563,10 +565,10 @@ public class LeveldbApplicationTimelineStore extends AbstractService } // write related entity entries - Map> relatedEntities = + Map> relatedEntities = atsEntity.getRelatedEntities(); if (relatedEntities != null && !relatedEntities.isEmpty()) { - for (Entry> 
relatedEntityList : + for (Entry> relatedEntityList : relatedEntities.entrySet()) { String relatedEntityType = relatedEntityList.getKey(); for (String relatedEntityId : relatedEntityList.getValue()) { @@ -595,12 +597,16 @@ public class LeveldbApplicationTimelineStore extends AbstractService // write primary filter entries if (primaryFilters != null && !primaryFilters.isEmpty()) { - for (Entry primaryFilter : primaryFilters.entrySet()) { - byte[] key = createPrimaryFilterKey(atsEntity.getEntityId(), - atsEntity.getEntityType(), revStartTime, primaryFilter.getKey()); - byte[] value = GenericObjectMapper.write(primaryFilter.getValue()); - writeBatch.put(key, value); - writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + for (Entry> primaryFilter : + primaryFilters.entrySet()) { + for (Object primaryFilterValue : primaryFilter.getValue()) { + byte[] key = createPrimaryFilterKey(atsEntity.getEntityId(), + atsEntity.getEntityType(), revStartTime, + primaryFilter.getKey(), primaryFilterValue); + writeBatch.put(key, EMPTY_BYTES); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, + EMPTY_BYTES); + } } } @@ -634,12 +640,14 @@ public class LeveldbApplicationTimelineStore extends AbstractService * write additional entries to the db for each primary filter. */ private static void writePrimaryFilterEntries(WriteBatch writeBatch, - Map primaryFilters, byte[] key, byte[] value) + Map> primaryFilters, byte[] key, byte[] value) throws IOException { if (primaryFilters != null && !primaryFilters.isEmpty()) { - for (Entry p : primaryFilters.entrySet()) { - writeBatch.put(addPrimaryFilterToKey(p.getKey(), p.getValue(), - key), value); + for (Entry> pf : primaryFilters.entrySet()) { + for (Object pfval : pf.getValue()) { + writeBatch.put(addPrimaryFilterToKey(pf.getKey(), pfval, + key), value); + } } } } @@ -790,13 +798,26 @@ public class LeveldbApplicationTimelineStore extends AbstractService /** * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX + - * entitytype + revstarttime + entity + PRIMARY_FILTER_COLUMN + name. + * entitytype + revstarttime + entity + PRIMARY_FILTER_COLUMN + name + value. */ private static byte[] createPrimaryFilterKey(String entity, - String entitytype, byte[] revStartTime, String name) throws IOException { + String entitytype, byte[] revStartTime, String name, Object value) + throws IOException { return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) .add(revStartTime).add(entity).add(PRIMARY_FILTER_COLUMN).add(name) - .getBytes(); + .add(GenericObjectMapper.write(value)).getBytes(); + } + + /** + * Parses the primary filter from the given key at the given offset and + * adds it to the given entity. 
+ */ + private static void addPrimaryFilter(ATSEntity atsEntity, byte[] key, + int offset) throws IOException { + KeyParser kp = new KeyParser(key, offset); + String name = kp.getNextString(); + Object value = GenericObjectMapper.read(key, kp.getOffset()); + atsEntity.addPrimaryFilter(name, value); } /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java index 1c8e392cfe2..e3a35a0e849 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java @@ -91,7 +91,7 @@ public class MemoryApplicationTimelineStore continue; } if (primaryFilter != null && - !matchFilter(entity.getPrimaryFilters(), primaryFilter)) { + !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) { continue; } if (secondaryFilters != null) { // OR logic @@ -236,7 +236,7 @@ public class MemoryApplicationTimelineStore if (entity.getRelatedEntities() == null) { continue; } - for (Map.Entry> partRelatedEntities : entity + for (Map.Entry> partRelatedEntities : entity .getRelatedEntities().entrySet()) { if (partRelatedEntities == null) { continue; @@ -293,4 +293,14 @@ public class MemoryApplicationTimelineStore return true; } + private static boolean matchPrimaryFilter(Map> tags, + NameValuePair filter) { + Set value = tags.get(filter.getName()); + if (value == null) { // doesn't have the filter + return false; + } else { + return value.contains(filter.getValue()); + } + } + } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java index 9afa5c0234a..eb3b06d73aa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java @@ -21,16 +21,17 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import 
java.util.SortedSet; import java.util.TreeSet; @@ -44,40 +45,48 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.Appli public class ApplicationTimelineStoreTestUtils { - private static final Map EMPTY_MAP = Collections.emptyMap(); - private static final Map> EMPTY_REL_ENTITIES = - new HashMap>(); + protected static final Map EMPTY_MAP = + Collections.emptyMap(); + protected static final Map> EMPTY_PRIMARY_FILTERS = + Collections.emptyMap(); + protected static final Map> EMPTY_REL_ENTITIES = + Collections.emptyMap(); protected ApplicationTimelineStore store; - private String entity1; - private String entityType1; - private String entity1b; - private String entity2; - private String entityType2; - private Map primaryFilters; - private Map secondaryFilters; - private Map allFilters; - private Map otherInfo; - private Map> relEntityMap; - private NameValuePair userFilter; - private Collection goodTestingFilters; - private Collection badTestingFilters; - private ATSEvent ev1; - private ATSEvent ev2; - private ATSEvent ev3; - private ATSEvent ev4; - private Map eventInfo; - private List events1; - private List events2; + protected String entity1; + protected String entityType1; + protected String entity1b; + protected String entity2; + protected String entityType2; + protected Map> primaryFilters; + protected Map secondaryFilters; + protected Map allFilters; + protected Map otherInfo; + protected Map> relEntityMap; + protected NameValuePair userFilter; + protected Collection goodTestingFilters; + protected Collection badTestingFilters; + protected ATSEvent ev1; + protected ATSEvent ev2; + protected ATSEvent ev3; + protected ATSEvent ev4; + protected Map eventInfo; + protected List events1; + protected List events2; /** * Load test data into the given store */ protected void loadTestData() throws IOException { ATSEntities atsEntities = new ATSEntities(); - Map primaryFilters = new HashMap(); - primaryFilters.put("user", "username"); - primaryFilters.put("appname", 12345l); + Map> primaryFilters = + new HashMap>(); + Set l1 = new HashSet(); + l1.add("username"); + Set l2 = new HashSet(); + l2.add(12345l); + primaryFilters.put("user", l1); + primaryFilters.put("appname", l2); Map secondaryFilters = new HashMap(); secondaryFilters.put("startTime", 123456l); secondaryFilters.put("status", "RUNNING"); @@ -91,9 +100,9 @@ public class ApplicationTimelineStoreTestUtils { String entity2 = "id_2"; String entityType2 = "type_2"; - Map> relatedEntities = - new HashMap>(); - relatedEntities.put(entityType2, Collections.singletonList(entity2)); + Map> relatedEntities = + new HashMap>(); + relatedEntities.put(entityType2, Collections.singleton(entity2)); ATSEvent ev3 = createEvent(789l, "launch_event", null); ATSEvent ev4 = createEvent(-123l, "init_event", null); @@ -156,15 +165,23 @@ public class ApplicationTimelineStoreTestUtils { badTestingFilters.add(new NameValuePair("appname", 12345l)); badTestingFilters.add(new NameValuePair("status", "FINISHED")); - primaryFilters = new HashMap(); - primaryFilters.put("user", "username"); - primaryFilters.put("appname", 12345l); + primaryFilters = new HashMap>(); + Set l1 = new HashSet(); + l1.add("username"); + Set l2 = new HashSet(); + l2.add(12345l); + primaryFilters.put("user", l1); + primaryFilters.put("appname", l2); secondaryFilters = new HashMap(); secondaryFilters.put("startTime", 123456l); secondaryFilters.put("status", "RUNNING"); allFilters = new HashMap(); allFilters.putAll(secondaryFilters); - allFilters.putAll(primaryFilters); 
+ for (Entry> pf : primaryFilters.entrySet()) { + for (Object o : pf.getValue()) { + allFilters.put(pf.getKey(), o); + } + } otherInfo = new HashMap(); otherInfo.put("info1", "val1"); otherInfo.put("info2", "val2"); @@ -186,8 +203,8 @@ public class ApplicationTimelineStoreTestUtils { events1.add(ev1); relEntityMap = - new HashMap>(); - List ids = new ArrayList(); + new HashMap>(); + Set ids = new HashSet(); ids.add(entity1); ids.add(entity1b); relEntityMap.put(entityType1, ids); @@ -212,8 +229,8 @@ public class ApplicationTimelineStoreTestUtils { primaryFilters, otherInfo, store.getEntity(entity1b, entityType1, EnumSet.allOf(Field.class))); - verifyEntityInfo(entity2, entityType2, events2, relEntityMap, EMPTY_MAP, - EMPTY_MAP, store.getEntity(entity2, entityType2, + verifyEntityInfo(entity2, entityType2, events2, relEntityMap, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, store.getEntity(entity2, entityType2, EnumSet.allOf(Field.class))); // test getting single fields @@ -267,8 +284,8 @@ public class ApplicationTimelineStoreTestUtils { entities = store.getEntities("type_2", null, null, null, null, null, EnumSet.allOf(Field.class)).getEntities(); assertEquals(1, entities.size()); - verifyEntityInfo(entity2, entityType2, events2, relEntityMap, EMPTY_MAP, - EMPTY_MAP, entities.get(0)); + verifyEntityInfo(entity2, entityType2, events2, relEntityMap, + EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0)); entities = store.getEntities("type_1", 1l, null, null, null, null, EnumSet.allOf(Field.class)).getEntities(); @@ -457,9 +474,9 @@ public class ApplicationTimelineStoreTestUtils { /** * Verify a single entity */ - private static void verifyEntityInfo(String entity, String entityType, - List events, Map> relatedEntities, - Map primaryFilters, Map otherInfo, + protected static void verifyEntityInfo(String entity, String entityType, + List events, Map> relatedEntities, + Map> primaryFilters, Map otherInfo, ATSEntity retrievedEntityInfo) { if (entity == null) { assertNull(retrievedEntityInfo); @@ -467,23 +484,27 @@ public class ApplicationTimelineStoreTestUtils { } assertEquals(entity, retrievedEntityInfo.getEntityId()); assertEquals(entityType, retrievedEntityInfo.getEntityType()); - if (events == null) + if (events == null) { assertNull(retrievedEntityInfo.getEvents()); - else + } else { assertEquals(events, retrievedEntityInfo.getEvents()); - if (relatedEntities == null) + } + if (relatedEntities == null) { assertNull(retrievedEntityInfo.getRelatedEntities()); - else + } else { assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities()); - if (primaryFilters == null) + } + if (primaryFilters == null) { assertNull(retrievedEntityInfo.getPrimaryFilters()); - else + } else { assertTrue(primaryFilters.equals( retrievedEntityInfo.getPrimaryFilters())); - if (otherInfo == null) + } + if (otherInfo == null) { assertNull(retrievedEntityInfo.getOtherInfo()); - else + } else { assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo())); + } } /** @@ -503,21 +524,25 @@ public class ApplicationTimelineStoreTestUtils { /** * Create a test entity */ - private static ATSEntity createEntity(String entity, String entityType, + protected static ATSEntity createEntity(String entity, String entityType, Long startTime, List events, - Map> relatedEntities, - Map primaryFilters, Map otherInfo) { + Map> relatedEntities, + Map> primaryFilters, + Map otherInfo) { ATSEntity atsEntity = new ATSEntity(); atsEntity.setEntityId(entity); atsEntity.setEntityType(entityType); atsEntity.setStartTime(startTime); 
     atsEntity.setEvents(events);
-    if (relatedEntities != null)
-      for (Entry<String, List<String>> e : relatedEntities.entrySet())
-        for (String v : e.getValue())
+    if (relatedEntities != null) {
+      for (Entry<String, Set<String>> e : relatedEntities.entrySet()) {
+        for (String v : e.getValue()) {
           atsEntity.addRelatedEntity(e.getKey(), v);
-    else
+        }
+      }
+    } else {
       atsEntity.setRelatedEntities(null);
+    }
     atsEntity.setPrimaryFilters(primaryFilters);
     atsEntity.setOtherInfo(otherInfo);
     return atsEntity;
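
The sketches below are appended editorial examples; they are not part of the patch. The first shows how a client might use the reworked ATSEntity API now that related-entities and primary-filters carry Set values: repeated calls to addPrimaryFilter or addRelatedEntity with the same value collapse into a single entry instead of overwriting or duplicating. The entity ids and filter names are illustrative, borrowed loosely from the test data.

import java.util.Set;

import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity;

// Editorial sketch (not part of the patch): exercising the Set-valued
// related-entities and primary-filters introduced by YARN-1732.
public class ATSEntitySetExample {
  public static void main(String[] args) {
    ATSEntity entity = new ATSEntity();
    entity.setEntityId("id_1");        // illustrative id
    entity.setEntityType("type_1");    // illustrative type
    entity.setStartTime(123456L);

    // Several values may now be stored under one primary-filter name,
    // and re-adding an existing value is a no-op.
    entity.addPrimaryFilter("user", "username");
    entity.addPrimaryFilter("user", "username");
    entity.addPrimaryFilter("appname", 12345L);

    // Related entity ids are likewise de-duplicated per entity type.
    entity.addRelatedEntity("type_2", "id_2");
    entity.addRelatedEntity("type_2", "id_2");

    Set<Object> users = entity.getPrimaryFilters().get("user");
    Set<String> related = entity.getRelatedEntities().get("type_2");
    System.out.println(users.size() + " " + related.size());  // prints "1 1"
  }
}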
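
The second sketch covers the new GenericObjectMapper.read(byte[], int) overload, which the leveldb store uses to deserialize a primary-filter value that has been appended to the end of a key. This is a minimal round trip assuming only the static write/read methods that appear in the patch; the prefix bytes stand in for the real key prefix built by the store.

import java.io.IOException;
import java.nio.charset.Charset;

import org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.GenericObjectMapper;

// Editorial sketch (not part of the patch): reading a value that was written
// with GenericObjectMapper.write(Object) and embedded at an offset inside a
// larger byte array, the way the leveldb store now appends the primary-filter
// value to its key.
public class GenericObjectMapperOffsetExample {
  public static void main(String[] args) throws IOException {
    byte[] prefix = "key-prefix".getBytes(Charset.forName("UTF-8"));  // stand-in prefix
    byte[] value = GenericObjectMapper.write("RUNNING");

    byte[] combined = new byte[prefix.length + value.length];
    System.arraycopy(prefix, 0, combined, 0, prefix.length);
    System.arraycopy(value, 0, combined, prefix.length, value.length);

    // Skip the prefix and read the serialized object that follows it.
    Object restored = GenericObjectMapper.read(combined, prefix.length);
    System.out.println(restored);  // RUNNING
  }
}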
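
Finally, a sketch of the matching rule both stores now apply for primary filters: a requested name/value pair matches an entity when the value is contained in the Set stored under that name, rather than being equal to a single stored value (the secondary-filter path in the leveldb store falls back to the same containment check when the name is absent from otherInfo). The helper below mirrors the shape of matchPrimaryFilter in MemoryApplicationTimelineStore; the class and method names here are illustrative.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Editorial sketch (not part of the patch): the Set-containment test that
// replaces the old single-value equality test for primary filters.
public class PrimaryFilterMatching {

  // Absent name: no match. Present name: match iff the Set contains the value.
  static boolean matchPrimaryFilter(Map<String, Set<Object>> primaryFilters,
      String name, Object value) {
    Set<Object> values = primaryFilters.get(name);
    return values != null && values.contains(value);
  }

  public static void main(String[] args) {
    Map<String, Set<Object>> primaryFilters =
        new HashMap<String, Set<Object>>();
    Set<Object> users = new HashSet<Object>();
    users.add("username");
    primaryFilters.put("user", users);

    System.out.println(matchPrimaryFilter(primaryFilters, "user", "username"));  // true
    System.out.println(matchPrimaryFilter(primaryFilters, "user", "other"));     // false
    System.out.println(matchPrimaryFilter(primaryFilters, "appname", 12345L));   // false (name absent)
  }
}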