YARN-1729. Made TimelineWebServices deserialize the string primary- and secondary-filters param into the JSON-compatible object. Contributed by Billie Rinaldi.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1573825 13f79535-47bb-0310-9956-ffa450edef68
commit 33714d9ad6
parent 5bf449e402
@@ -381,6 +381,11 @@ Release 2.4.0 - UNRELEASED
     YARN-1748. Excluded core-site.xml from hadoop-yarn-server-tests package's jar
     and thus avoid breaking downstream tests. (Sravya Tirukkovalur via vinodkv)

+    YARN-1729. Made TimelineWebServices deserialize the string primary- and
+    secondary-filters param into the JSON-compatible object. (Billie Rinaldi via
+    zjshen)
+
+
 Release 2.3.1 - UNRELEASED

   INCOMPATIBLE CHANGES

@@ -17,27 +17,18 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;

-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.WritableUtils;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
+import org.codehaus.jackson.map.ObjectWriter;

 /**
  * A utility class providing methods for serializing and deserializing
- * objects. The {@link #write(Object)}, {@link #read(byte[])} and {@link
- * #write(java.io.DataOutputStream, Object)}, {@link
- * #read(java.io.DataInputStream)} methods are used by the
- * {@link LeveldbTimelineStore} to store and retrieve arbitrary
+ * objects. The {@link #write(Object)} and {@link #read(byte[])} methods are
+ * used by the {@link LeveldbTimelineStore} to store and retrieve arbitrary
  * JSON, while the {@link #writeReverseOrderedLong} and {@link
  * #readReverseOrderedLong} methods are used to sort entities in descending
  * start time order.
@@ -47,79 +38,31 @@ import org.codehaus.jackson.map.ObjectMapper;
 public class GenericObjectMapper {
   private static final byte[] EMPTY_BYTES = new byte[0];

-  private static final byte LONG = 0x1;
-  private static final byte INTEGER = 0x2;
-  private static final byte DOUBLE = 0x3;
-  private static final byte STRING = 0x4;
-  private static final byte BOOLEAN = 0x5;
-  private static final byte LIST = 0x6;
-  private static final byte MAP = 0x7;
+  public static final ObjectReader OBJECT_READER;
+  public static final ObjectWriter OBJECT_WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    OBJECT_READER = mapper.reader(Object.class);
+    OBJECT_WRITER = mapper.writer();
+  }

   /**
-   * Serializes an Object into a byte array. Along with {@link #read(byte[]) },
+   * Serializes an Object into a byte array. Along with {@link #read(byte[])},
    * can be used to serialize an Object and deserialize it into an Object of
    * the same type without needing to specify the Object's type,
-   * as long as it is one of the JSON-compatible objects Long, Integer,
-   * Double, String, Boolean, List, or Map. The current implementation uses
-   * ObjectMapper to serialize complex objects (List and Map) while using
-   * Writable to serialize simpler objects, to produce fewer bytes.
+   * as long as it is one of the JSON-compatible objects understood by
+   * ObjectMapper.
    *
    * @param o An Object
    * @return A byte array representation of the Object
    * @throws IOException
    */
   public static byte[] write(Object o) throws IOException {
-    if (o == null)
+    if (o == null) {
       return EMPTY_BYTES;
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    write(new DataOutputStream(baos), o);
-    return baos.toByteArray();
-  }
-
-  /**
-   * Serializes an Object and writes it to a DataOutputStream. Along with
-   * {@link #read(java.io.DataInputStream)}, can be used to serialize an Object
-   * and deserialize it into an Object of the same type without needing to
-   * specify the Object's type, as long as it is one of the JSON-compatible
-   * objects Long, Integer, Double, String, Boolean, List, or Map. The current
-   * implementation uses ObjectMapper to serialize complex objects (List and
-   * Map) while using Writable to serialize simpler objects, to produce fewer
-   * bytes.
-   *
-   * @param dos A DataOutputStream
-   * @param o An Object
-   * @throws IOException
-   */
-  public static void write(DataOutputStream dos, Object o)
-      throws IOException {
-    if (o == null)
-      return;
-    if (o instanceof Long) {
-      dos.write(LONG);
-      WritableUtils.writeVLong(dos, (Long) o);
-    } else if(o instanceof Integer) {
-      dos.write(INTEGER);
-      WritableUtils.writeVInt(dos, (Integer) o);
-    } else if(o instanceof Double) {
-      dos.write(DOUBLE);
-      dos.writeDouble((Double) o);
-    } else if (o instanceof String) {
-      dos.write(STRING);
-      WritableUtils.writeString(dos, (String) o);
-    } else if (o instanceof Boolean) {
-      dos.write(BOOLEAN);
-      dos.writeBoolean((Boolean) o);
-    } else if (o instanceof List) {
-      dos.write(LIST);
-      ObjectMapper mapper = new ObjectMapper();
-      mapper.writeValue(dos, o);
-    } else if (o instanceof Map) {
-      dos.write(MAP);
-      ObjectMapper mapper = new ObjectMapper();
-      mapper.writeValue(dos, o);
-    } else {
-      throw new IOException("Couldn't serialize object");
     }
+    return OBJECT_WRITER.writeValueAsBytes(o);
   }

   /**
@@ -147,42 +90,7 @@ public class GenericObjectMapper {
     if (b == null || b.length == 0) {
       return null;
     }
-    ByteArrayInputStream bais = new ByteArrayInputStream(b, offset,
-        b.length - offset);
-    return read(new DataInputStream(bais));
-  }
-
-  /**
-   * Reads an Object from a DataInputStream whose data has been written with
-   * {@link #write(java.io.DataOutputStream, Object)}.
-   *
-   * @param dis A DataInputStream
-   * @return An Object, null if an unrecognized type
-   * @throws IOException
-   */
-  public static Object read(DataInputStream dis) throws IOException {
-    byte code = (byte)dis.read();
-    ObjectMapper mapper;
-    switch (code) {
-    case LONG:
-      return WritableUtils.readVLong(dis);
-    case INTEGER:
-      return WritableUtils.readVInt(dis);
-    case DOUBLE:
-      return dis.readDouble();
-    case STRING:
-      return WritableUtils.readString(dis);
-    case BOOLEAN:
-      return dis.readBoolean();
-    case LIST:
-      mapper = new ObjectMapper();
-      return mapper.readValue(dis, ArrayList.class);
-    case MAP:
-      mapper = new ObjectMapper();
-      return mapper.readValue(dis, HashMap.class);
-    default:
-      return null;
-    }
+    return OBJECT_READER.readValue(b, offset, b.length - offset);
   }

   /**
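Taken together, the hunks above reduce GenericObjectMapper to a thin wrapper over Jackson. A minimal round-trip sketch (an illustrative demo class, not part of the patch; the sample map and values are made up) of what that means for callers, including the detail that the new tests and MemoryTimelineStore.maybeConvert below depend on: an in-range Long written as JSON is read back as an Integer.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;

// Illustrative demo class only, not part of the patch.
public class GenericObjectMapperDemo {
  public static void main(String[] args) throws IOException {
    Map<String, Object> otherInfo = new HashMap<String, Object>();
    otherInfo.put("startTime", 123456L);
    otherInfo.put("status", "RUNNING");

    // write() now delegates to OBJECT_WRITER, so the bytes are plain JSON
    byte[] bytes = GenericObjectMapper.write(otherInfo);

    // read() goes through OBJECT_READER and rebuilds generic JSON types;
    // integral values that fit in an int come back as Integer, not Long
    @SuppressWarnings("unchecked")
    Map<String, Object> restored =
        (Map<String, Object>) GenericObjectMapper.read(bytes);
    System.out.println(restored.get("startTime").getClass()); // class java.lang.Integer
    System.out.println(restored.get("status"));               // RUNNING
  }
}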
@@ -195,8 +103,9 @@ public class GenericObjectMapper {
   public static byte[] writeReverseOrderedLong(long l) {
     byte[] b = new byte[8];
     b[0] = (byte)(0x7f ^ ((l >> 56) & 0xff));
-    for (int i = 1; i < 7; i++)
+    for (int i = 1; i < 7; i++) {
       b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff));
+    }
     b[7] = (byte)(0xff ^ (l & 0xff));
     return b;
   }

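The reverse-ordered encoding shown above is what lets the store return entities newest-first: larger start times encode to byte-wise smaller keys. A quick check of that property (illustrative only; compareUnsigned is a stand-in for the byte-wise comparison a LevelDB key scan effectively applies):

import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;

// Illustrative demo class only, not part of the patch.
public class ReverseOrderedLongDemo {
  // Stand-in for an unsigned, byte-wise key comparison.
  static int compareUnsigned(byte[] a, byte[] b) {
    for (int i = 0; i < a.length && i < b.length; i++) {
      int d = (a[i] & 0xff) - (b[i] & 0xff);
      if (d != 0) {
        return d;
      }
    }
    return a.length - b.length;
  }

  public static void main(String[] args) {
    byte[] newer = GenericObjectMapper.writeReverseOrderedLong(200L);
    byte[] older = GenericObjectMapper.writeReverseOrderedLong(100L);
    // the larger (newer) start time encodes to the smaller key, so an
    // ascending key scan yields entities in descending start time order
    System.out.println(compareUnsigned(newer, older) < 0); // true
  }
}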
@@ -26,6 +26,7 @@ import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.PriorityQueue;
 import java.util.Set;
 import java.util.SortedSet;
@@ -94,12 +95,13 @@ public class MemoryTimelineStore
           !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
         continue;
       }
-      if (secondaryFilters != null) { // OR logic
-        boolean flag = false;
+      if (secondaryFilters != null) { // AND logic
+        boolean flag = true;
         for (NameValuePair secondaryFilter : secondaryFilters) {
-          if (secondaryFilter != null &&
-              matchFilter(entity.getOtherInfo(), secondaryFilter)) {
-            flag = true;
+          if (secondaryFilter != null && !matchPrimaryFilter(
+              entity.getPrimaryFilters(), secondaryFilter) &&
+              !matchFilter(entity.getOtherInfo(), secondaryFilter)) {
+            flag = false;
             break;
           }
         }
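In plain terms, the loop above now drops an entity as soon as any secondary filter matches neither its primary filters nor its otherInfo, so only entities satisfying every filter are returned. A standalone sketch of that predicate (illustrative; it assumes NameValuePair exposes getName() and getValue() and ignores the store's other matching rules):

import java.util.Collection;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;

// Illustrative only: the AND semantics over secondary filters, in isolation.
public class SecondaryFilterCheck {
  // Assumes NameValuePair exposes getName() and getValue().
  static boolean matchesAll(Map<String, Set<Object>> primaryFilters,
      Map<String, Object> otherInfo,
      Collection<NameValuePair> secondaryFilters) {
    for (NameValuePair filter : secondaryFilters) {
      Set<Object> primaryValues = primaryFilters.get(filter.getName());
      boolean inPrimary =
          primaryValues != null && primaryValues.contains(filter.getValue());
      boolean inOtherInfo =
          filter.getValue().equals(otherInfo.get(filter.getName()));
      if (!inPrimary && !inOtherInfo) {
        return false; // one miss is enough to reject the entity
      }
    }
    return true;
  }
}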
@@ -220,16 +222,22 @@ public class MemoryTimelineStore
       }
       if (entity.getPrimaryFilters() != null) {
         if (existingEntity.getPrimaryFilters() == null) {
-          existingEntity.setPrimaryFilters(entity.getPrimaryFilters());
-        } else {
-          existingEntity.addPrimaryFilters(entity.getPrimaryFilters());
+          existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
         }
+        for (Entry<String, Set<Object>> pf :
+            entity.getPrimaryFilters().entrySet()) {
+          for (Object pfo : pf.getValue()) {
+            existingEntity.addPrimaryFilter(pf.getKey(), maybeConvert(pfo));
+          }
+        }
       }
       if (entity.getOtherInfo() != null) {
         if (existingEntity.getOtherInfo() == null) {
-          existingEntity.setOtherInfo(entity.getOtherInfo());
-        } else {
-          existingEntity.addOtherInfo(entity.getOtherInfo());
+          existingEntity.setOtherInfo(new HashMap<String, Object>());
         }
+        for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
+          existingEntity.addOtherInfo(info.getKey(),
+              maybeConvert(info.getValue()));
+        }
       }
       // relate it to other entities
@@ -303,4 +311,14 @@ public class MemoryTimelineStore
     }
   }

+  private static Object maybeConvert(Object o) {
+    if (o instanceof Long) {
+      Long l = (Long)o;
+      if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) {
+        return l.intValue();
+      }
+    }
+    return o;
+  }
+
 }

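maybeConvert exists because filter values now make a JSON round trip: a value stored from Java as 12345L comes back through the web layer as the Integer 12345, and Long.equals(Integer) is false, so lookups would silently miss. Normalizing in-range Longs to Integers on the way into the store keeps the comparisons consistent, as this throwaway demo (not part of the patch) illustrates:

// Illustrative demo class only, not part of the patch.
public class MaybeConvertDemo {
  // Same conversion as the maybeConvert method added above.
  static Object maybeConvert(Object o) {
    if (o instanceof Long) {
      Long l = (Long) o;
      if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) {
        return l.intValue();
      }
    }
    return o;
  }

  public static void main(String[] args) {
    Object stored = 12345L; // value supplied by a Java client as a Long
    Object parsed = 12345;  // the same value after a JSON round trip (an Integer)
    System.out.println(stored.equals(parsed));               // false
    System.out.println(maybeConvert(stored).equals(parsed)); // true
  }
}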
@@ -54,6 +54,7 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
@@ -273,7 +274,13 @@ public class TimelineWebServices {
       return null;
     }
     String[] strs = str.split(delimiter, 2);
-    return new NameValuePair(strs[0].trim(), strs[1].trim());
+    try {
+      return new NameValuePair(strs[0].trim(),
+          GenericObjectMapper.OBJECT_READER.readValue(strs[1].trim()));
+    } catch (Exception e) {
+      // didn't work as an Object, keep it as a String
+      return new NameValuePair(strs[0].trim(), strs[1].trim());
+    }
   }

   private static Collection<NameValuePair> parsePairsStr(
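This is the core of YARN-1729: the value half of a primaryFilter or secondaryFilter query parameter is now parsed by Jackson via GenericObjectMapper.OBJECT_READER, so numbers and quoted strings arrive in the store's own JSON types, while anything that is not parseable JSON falls back to the raw String. A rough sketch of the resulting behavior (an illustrative demo class; the edge case of a mixed token like 123abc being read as the number 123 is the one the tests below document):

import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;

// Illustrative demo class only, not part of the patch.
public class FilterValueDemo {
  // Mirrors the try/catch added to parsePairStr above.
  static Object parseValue(String raw) {
    try {
      return GenericObjectMapper.OBJECT_READER.readValue(raw);
    } catch (Exception e) {
      return raw; // not parseable as JSON, keep the literal String
    }
  }

  public static void main(String[] args) {
    System.out.println(parseValue("2147483647").getClass().getSimpleName()); // Integer
    System.out.println(parseValue("2147483648").getClass().getSimpleName()); // Long
    System.out.println(parseValue("\"123abc\""));  // 123abc, as a JSON String
    System.out.println(parseValue("username"));    // not JSON, stays "username"
  }
}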
@@ -297,24 +304,29 @@ public class TimelineWebServices {
     List<Field> fieldList = new ArrayList<Field>();
     for (String s : strs) {
       s = s.trim().toUpperCase();
-      if (s.equals("EVENTS"))
+      if (s.equals("EVENTS")) {
         fieldList.add(Field.EVENTS);
-      else if (s.equals("LASTEVENTONLY"))
+      } else if (s.equals("LASTEVENTONLY")) {
         fieldList.add(Field.LAST_EVENT_ONLY);
-      else if (s.equals("RELATEDENTITIES"))
+      } else if (s.equals("RELATEDENTITIES")) {
         fieldList.add(Field.RELATED_ENTITIES);
-      else if (s.equals("PRIMARYFILTERS"))
+      } else if (s.equals("PRIMARYFILTERS")) {
         fieldList.add(Field.PRIMARY_FILTERS);
-      else if (s.equals("OTHERINFO"))
+      } else if (s.equals("OTHERINFO")) {
         fieldList.add(Field.OTHER_INFO);
+      } else {
+        throw new IllegalArgumentException("Requested nonexistent field " + s);
+      }
     }
-    if (fieldList.size() == 0)
+    if (fieldList.size() == 0) {
       return null;
+    }
     Field f1 = fieldList.remove(fieldList.size() - 1);
-    if (fieldList.size() == 0)
+    if (fieldList.size() == 0) {
       return EnumSet.of(f1);
-    else
+    } else {
       return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()]));
+    }
   }

   private static Long parseLongStr(String str) {

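Beyond the brace cleanup, the behavioral change in this hunk is that an unrecognized fields value now raises IllegalArgumentException instead of being silently ignored. A stripped-down stand-in (the nested Field enum and the two recognized names here are placeholders, not the real TimelineReader.Field):

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;

// Illustrative only: strict parsing of a comma-separated fields parameter.
public class FieldsParseDemo {
  enum Field { EVENTS, OTHER_INFO }

  static EnumSet<Field> parse(String str) {
    List<Field> fields = new ArrayList<Field>();
    for (String s : str.split(",")) {
      s = s.trim().toUpperCase();
      if (s.equals("EVENTS")) {
        fields.add(Field.EVENTS);
      } else if (s.equals("OTHERINFO")) {
        fields.add(Field.OTHER_INFO);
      } else {
        throw new IllegalArgumentException("Requested nonexistent field " + s);
      }
    }
    return fields.isEmpty() ? null : EnumSet.copyOf(fields);
  }

  public static void main(String[] args) {
    System.out.println(parse("events,otherinfo")); // [EVENTS, OTHER_INFO]
    parse("events,bogus");                         // throws IllegalArgumentException
  }
}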
@@ -72,7 +72,19 @@ public class TestGenericObjectMapper {

   @Test
   public void testValueTypes() throws IOException {
-    verify(42l);
+    verify(Integer.MAX_VALUE);
+    verify(Integer.MIN_VALUE);
+    assertEquals(Integer.MAX_VALUE, GenericObjectMapper.read(
+        GenericObjectMapper.write((long) Integer.MAX_VALUE)));
+    assertEquals(Integer.MIN_VALUE, GenericObjectMapper.read(
+        GenericObjectMapper.write((long) Integer.MIN_VALUE)));
+    verify((long)Integer.MAX_VALUE + 1l);
+    verify((long)Integer.MIN_VALUE - 1l);
+
+    verify(Long.MAX_VALUE);
+    verify(Long.MIN_VALUE);
+
+    assertEquals(42, GenericObjectMapper.read(GenericObjectMapper.write(42l)));
     verify(42);
     verify(1.23);
     verify("abc");

@@ -66,6 +66,9 @@ public class TimelineStoreTestUtils {
   protected Map<String, Object> otherInfo;
   protected Map<String, Set<String>> relEntityMap;
   protected NameValuePair userFilter;
+  protected NameValuePair numericFilter1;
+  protected NameValuePair numericFilter2;
+  protected NameValuePair numericFilter3;
   protected Collection<NameValuePair> goodTestingFilters;
   protected Collection<NameValuePair> badTestingFilters;
   protected TimelineEvent ev1;
@@ -86,9 +89,15 @@ public class TimelineStoreTestUtils {
     Set<Object> l1 = new HashSet<Object>();
     l1.add("username");
     Set<Object> l2 = new HashSet<Object>();
     l2.add(12345l);
+    l2.add((long)Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
     primaryFilters.put("user", l1);
     primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
     Map<String, Object> secondaryFilters = new HashMap<String, Object>();
     secondaryFilters.put("startTime", 123456l);
     secondaryFilters.put("status", "RUNNING");
@@ -158,24 +167,32 @@ public class TimelineStoreTestUtils {
    * Load verification data
    */
   protected void loadVerificationData() throws Exception {
-    userFilter = new NameValuePair("user",
-        "username");
+    userFilter = new NameValuePair("user", "username");
+    numericFilter1 = new NameValuePair("appname", Integer.MAX_VALUE);
+    numericFilter2 = new NameValuePair("long", (long)Integer.MAX_VALUE + 1l);
+    numericFilter3 = new NameValuePair("other", "123abc");
     goodTestingFilters = new ArrayList<NameValuePair>();
-    goodTestingFilters.add(new NameValuePair("appname", 12345l));
+    goodTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
     goodTestingFilters.add(new NameValuePair("status", "RUNNING"));
     badTestingFilters = new ArrayList<NameValuePair>();
-    badTestingFilters.add(new NameValuePair("appname", 12345l));
+    badTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
     badTestingFilters.add(new NameValuePair("status", "FINISHED"));

     primaryFilters = new HashMap<String, Set<Object>>();
     Set<Object> l1 = new HashSet<Object>();
     l1.add("username");
     Set<Object> l2 = new HashSet<Object>();
     l2.add(12345l);
+    l2.add(Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
     primaryFilters.put("user", l1);
     primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
     secondaryFilters = new HashMap<String, Object>();
-    secondaryFilters.put("startTime", 123456l);
+    secondaryFilters.put("startTime", 123456);
     secondaryFilters.put("status", "RUNNING");
     allFilters = new HashMap<String, Object>();
     allFilters.putAll(secondaryFilters);
@@ -353,6 +370,30 @@ public class TimelineStoreTestUtils {
     verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
         primaryFilters, otherInfo, entities.get(1));

+    store.getEntities("type_1", null, null, null,
+        numericFilter1, null, EnumSet.allOf(Field.class)).getEntities();
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    store.getEntities("type_1", null, null, null,
+        numericFilter2, null, EnumSet.allOf(Field.class)).getEntities();
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    store.getEntities("type_1", null, null, null,
+        numericFilter3, null, EnumSet.allOf(Field.class)).getEntities();
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
     entities = store.getEntities("type_2", null, null, null, userFilter, null,
         EnumSet.allOf(Field.class)).getEntities();
     assertEquals(0, entities.size());

@@ -109,15 +109,7 @@ public class TestTimelineWebServices extends JerseyTest {
     Assert.assertEquals("Timeline API", about.getAbout());
   }

-  @Test
-  public void testGetEntities() throws Exception {
-    WebResource r = resource();
-    ClientResponse response = r.path("ws").path("v1").path("timeline")
-        .path("type_1")
-        .accept(MediaType.APPLICATION_JSON)
-        .get(ClientResponse.class);
-    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
-    TimelineEntities entities = response.getEntity(TimelineEntities.class);
+  private static void verifyEntities(TimelineEntities entities) {
     Assert.assertNotNull(entities);
     Assert.assertEquals(2, entities.getEntities().size());
     TimelineEntity entity1 = entities.getEntities().get(0);
@@ -126,7 +118,7 @@ public class TestTimelineWebServices extends JerseyTest {
     Assert.assertEquals("type_1", entity1.getEntityType());
     Assert.assertEquals(123l, entity1.getStartTime().longValue());
     Assert.assertEquals(2, entity1.getEvents().size());
-    Assert.assertEquals(2, entity1.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity1.getPrimaryFilters().size());
     Assert.assertEquals(4, entity1.getOtherInfo().size());
     TimelineEntity entity2 = entities.getEntities().get(1);
     Assert.assertNotNull(entity2);
@@ -134,10 +126,94 @@ public class TestTimelineWebServices extends JerseyTest {
     Assert.assertEquals("type_1", entity2.getEntityType());
     Assert.assertEquals(123l, entity2.getStartTime().longValue());
     Assert.assertEquals(2, entity2.getEvents().size());
-    Assert.assertEquals(2, entity2.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity2.getPrimaryFilters().size());
     Assert.assertEquals(4, entity2.getOtherInfo().size());
   }

+  @Test
+  public void testGetEntities() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterString() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "user:username")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterInteger() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter",
+            "appname:" + Integer.toString(Integer.MAX_VALUE))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterLong() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter",
+            "long:" + Long.toString((long)Integer.MAX_VALUE + 1l))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterNumericString() {
+    // without quotes, 123abc is interpreted as the number 123,
+    // which finds no entities
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "other:123abc")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(0, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+  }
+
+  @Test
+  public void testPrimaryFilterNumericStringWithQuotes() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "other:\"123abc\"")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testSecondaryFilters() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1")
+        .queryParam("secondaryFilter",
+            "user:username,appname:" + Integer.toString(Integer.MAX_VALUE))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
   @Test
   public void testGetEntity() throws Exception {
     WebResource r = resource();
@@ -152,7 +228,7 @@ public class TestTimelineWebServices extends JerseyTest {
     Assert.assertEquals("type_1", entity.getEntityType());
     Assert.assertEquals(123l, entity.getStartTime().longValue());
     Assert.assertEquals(2, entity.getEvents().size());
-    Assert.assertEquals(2, entity.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity.getPrimaryFilters().size());
     Assert.assertEquals(4, entity.getOtherInfo().size());
   }

@@ -189,7 +265,7 @@ public class TestTimelineWebServices extends JerseyTest {
     Assert.assertEquals("type_1", entity.getEntityType());
     Assert.assertEquals(123l, entity.getStartTime().longValue());
     Assert.assertEquals(1, entity.getEvents().size());
-    Assert.assertEquals(2, entity.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity.getPrimaryFilters().size());
     Assert.assertEquals(0, entity.getOtherInfo().size());
   }
