From 5e66d65bfd297e2f92b355e07b0542f784012663 Mon Sep 17 00:00:00 2001
From: Uwe Schindler
Date: Mon, 13 Jun 2011 16:03:20 +0000
Subject: [PATCH] SOLR-2400: Use int[] for position history in serialization

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1135154 13f79535-47bb-0310-9956-ffa450edef68
---
 solr/CHANGES.txt                             |   4 +-
 .../handler/AnalysisRequestHandlerBase.java  |  22 +-
 .../AnalysisRequestHandlerTestBase.java      |   9 +-
 .../DocumentAnalysisRequestHandlerTest.java  |  72 +++----
 .../FieldAnalysisRequestHandlerTest.java     | 200 +++++++++---------
 5 files changed, 154 insertions(+), 153 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1663a89d98e..552b6a9d5cd 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -287,8 +287,8 @@ New Features
 * SOLR-2400: Field- and DocumentAnalysisRequestHandler now provide a position
   history for each token, so you can follow the token through all analysis stages.
-  The output contains a separate string attribute, that is a "/"-delimited string
-  containing all positions from previous Tokenizers/TokenFilters.
+  The output contains a separate int[] attribute named "positionHistory" that
+  holds all positions from previous Tokenizers/TokenFilters.
   (Uwe Schindler)

 Optimizations

diff --git a/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java b/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
index 13f915f556e..5def900e97f 100644
--- a/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
+++ b/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
@@ -44,6 +44,7 @@ import org.apache.solr.schema.FieldType;
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.*;
+import org.apache.commons.lang.ArrayUtils;

 /**
  * A base class for all analysis request handlers.
@@ -361,26 +362,30 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
   public static final class TokenTrackingAttributeImpl extends AttributeImpl implements TokenTrackingAttribute {
     private int[] basePositions = new int[0];
     private int position = 0;
+    private transient int[] cachedPositions = null;

     public void freezeStage() {
       this.basePositions = getPositions();
       this.position = 0;
+      this.cachedPositions = null;
     }

     public void setActPosition(int pos) {
       this.position = pos;
+      this.cachedPositions = null;
     }

     public int[] getPositions() {
-      final int[] positions = new int[basePositions.length + 1];
-      System.arraycopy(basePositions, 0, positions, 0, basePositions.length);
-      positions[basePositions.length] = position;
-      return positions;
+      if (cachedPositions == null) {
+        cachedPositions = ArrayUtils.add(basePositions, position);
+      }
+      return cachedPositions;
     }

     public void reset(int[] basePositions, int position) {
       this.basePositions = basePositions;
       this.position = position;
+      this.cachedPositions = null;
     }

     @Override
@@ -390,14 +395,9 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
     @Override
     public void reflectWith(AttributeReflector reflector) {
-      final int[] positions = getPositions();
-      final StringBuilder sb = new StringBuilder(positions.length * 2);
-      for (int p : positions) {
-        if (sb.length() > 0) sb.append('/');
-        sb.append(p);
-      }
-      reflector.reflect(TokenTrackingAttribute.class, "positionHistory", sb.toString());
       reflector.reflect(TokenTrackingAttribute.class, "position", position);
+      // convert to an Integer[] array, as only that can be serialized by ResponseWriters
+      reflector.reflect(TokenTrackingAttribute.class, "positionHistory", ArrayUtils.toObject(getPositions()));
     }

     @Override

diff --git a/solr/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java b/solr/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java
index 5244d7dd2d0..5ad0787560b 100644
--- a/solr/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java
+++ b/solr/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java
@@ -19,6 +19,7 @@ package org.apache.solr.handler;

 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.NamedList;
+import org.apache.commons.lang.ArrayUtils;

 /**
  * A base class for all analysis request handler tests.
@@ -37,7 +38,7 @@ public abstract class AnalysisRequestHandlerTestBase extends SolrTestCaseJ4 {
     assertEquals(new Integer(info.getStart()), token.get("start"));
     assertEquals(new Integer(info.getEnd()), token.get("end"));
     assertEquals(new Integer(info.getPosition()), token.get("position"));
-    assertEquals(info.getPositionHistory(), token.get("positionHistory"));
+    assertArrayEquals(info.getPositionHistory(), ArrayUtils.toPrimitive((Integer[]) token.get("positionHistory")));
     if (info.isMatch()) {
       assertEquals(Boolean.TRUE, token.get("match"));
     }
@@ -58,7 +59,7 @@ public abstract class AnalysisRequestHandlerTestBase extends SolrTestCaseJ4 {
     private int end;
     private String payload;
     private int position;
-    private String positionHistory;
+    private int[] positionHistory;
     private boolean match;

     public TokenInfo(
@@ -68,7 +69,7 @@ public abstract class AnalysisRequestHandlerTestBase extends SolrTestCaseJ4 {
         int start,
         int end,
         int position,
-        String positionHistory,
+        int[] positionHistory,
         String payload,
         boolean match) {
@@ -111,7 +112,7 @@ public abstract class AnalysisRequestHandlerTestBase extends SolrTestCaseJ4 {
       return position;
     }

-    public String getPositionHistory() {
+    public int[] getPositionHistory() {
       return positionHistory;
     }

diff --git a/solr/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java b/solr/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java
index 1f2ed7612be..506029ceb7c 100644
--- a/solr/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java
+++ b/solr/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java
@@ -235,7 +235,7 @@ public class DocumentAnalysisRequestHandlerTest extends AnalysisRequestHandlerTe
     assertTrue("Only the default analyzer should be applied", name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*"));
     List tokenList = (List) queryResult.getVal(0);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, "1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[]{1}, null, false));

     NamedList indexResult = idResult.get("index");
     assertEquals("The id field has only a single value", 1, indexResult.size());
@@ -245,7 +245,7 @@ public class DocumentAnalysisRequestHandlerTest extends AnalysisRequestHandlerTe
     tokenList = valueResult.getVal(0);
     assertEquals("The 'id' field value has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("1", null, "word", 0, 1, 1, "1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("1", null, "word", 0, 1, 1, new int[]{1}, null, false));
     ***/

     // the name field
@@ -255,14 +255,14 @@ public class DocumentAnalysisRequestHandlerTest extends AnalysisRequestHandlerTe
     tokenList = (List) queryResult.get("org.apache.lucene.analysis.core.WhitespaceTokenizer");
     assertNotNull("Expecting the 'WhitespaceTokenizer' to be applied on the query for the 'whitetok' field", tokenList);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, "1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[]{1}, null, false));

     indexResult = whitetokResult.get("index");
     assertEquals("The 'whitetok' field has only a single value", 1, indexResult.size());
     valueResult = (NamedList>) indexResult.get("Jumping Jack");
     tokenList = valueResult.getVal(0);
     assertEquals("Expecting 2 tokens to be present", 2, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("Jumping", null, "word", 0, 7, 1, "1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("Jack", null, "word", 8, 12, 2, "2", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("Jumping", null, "word", 0, 7, 1, new int[]{1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("Jack", null, "word", 8, 12, 2, new int[]{2}, null, false));

     // the text field
     NamedList> textResult = documentResult.get("text");
@@ -271,66 +271,66 @@ public class DocumentAnalysisRequestHandlerTest extends AnalysisRequestHandlerTe
     tokenList = (List) queryResult.get("org.apache.lucene.analysis.standard.StandardTokenizer");
     assertNotNull("Expecting the 'StandardTokenizer' to be applied on the query for the 'text' field", tokenList);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "", 0, 7, 1, "1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "", 0, 7, 1, new int[]{1}, null, false));
     tokenList = (List) queryResult.get("org.apache.lucene.analysis.standard.StandardFilter");
     assertNotNull("Expecting the 'StandardFilter' to be applied on the query for the 'text' field", tokenList);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "", 0, 7, 1, "1/1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "", 0, 7, 1, new int[]{1,1}, null, false));
     tokenList = (List) queryResult.get("org.apache.lucene.analysis.core.LowerCaseFilter");
     assertNotNull("Expecting the 'LowerCaseFilter' to be applied on the query for the 'text' field", tokenList);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("jumping", null, "", 0, 7, 1, "1/1/1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("jumping", null, "", 0, 7, 1, new int[]{1,1,1}, null, false));
     tokenList = (List) queryResult.get("org.apache.lucene.analysis.core.StopFilter");
     assertNotNull("Expecting the 'StopFilter' to be applied on the query for the 'text' field", tokenList);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("jumping", null, "", 0, 7, 1, "1/1/1/1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("jumping", null, "", 0, 7, 1, new int[]{1,1,1,1}, null, false));
     tokenList = (List) queryResult.get("org.apache.lucene.analysis.en.PorterStemFilter");
     assertNotNull("Expecting the 'PorterStemFilter' to be applied on the query for the 'text' field", tokenList);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("jump", null, "", 0, 7, 1, "1/1/1/1/1", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("jump", null, "", 0, 7, 1, new int[]{1,1,1,1,1}, null, false));

     indexResult = textResult.get("index");
     assertEquals("The 'text' field has only a single value", 1, indexResult.size());
     valueResult = (NamedList>) indexResult.get("The Fox Jumped Over The Dogs");
     tokenList = valueResult.get("org.apache.lucene.analysis.standard.StandardTokenizer");
     assertNotNull("Expecting the 'StandardTokenizer' to be applied on the index for the 'text' field", tokenList);
     assertEquals("Expecting 6 tokens", 6, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("The", null, "", 0, 3, 1, "1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("Fox", null, "", 4, 7, 2, "2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("Jumped", null, "", 8, 14, 3, "3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("Over", null, "", 15, 19, 4, "4", null, false));
-    assertToken(tokenList.get(4), new TokenInfo("The", null, "", 20, 23, 5, "5", null, false));
-    assertToken(tokenList.get(5), new TokenInfo("Dogs", null, "", 24, 28, 6, "6", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("The", null, "", 0, 3, 1, new int[]{1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("Fox", null, "", 4, 7, 2, new int[]{2}, null, false));
+    assertToken(tokenList.get(2), new TokenInfo("Jumped", null, "", 8, 14, 3, new int[]{3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("Over", null, "", 15, 19, 4, new int[]{4}, null, false));
+    assertToken(tokenList.get(4), new TokenInfo("The", null, "", 20, 23, 5, new int[]{5}, null, false));
+    assertToken(tokenList.get(5), new TokenInfo("Dogs", null, "", 24, 28, 6, new int[]{6}, null, false));
     tokenList = valueResult.get("org.apache.lucene.analysis.standard.StandardFilter");
     assertNotNull("Expecting the 'StandardFilter' to be applied on the index for the 'text' field", tokenList);
     assertEquals("Expecting 6 tokens", 6, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("The", null, "", 0, 3, 1, "1/1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("Fox", null, "", 4, 7, 2, "2/2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("Jumped", null, "", 8, 14, 3, "3/3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("Over", null, "", 15, 19, 4, "4/4", null, false));
-    assertToken(tokenList.get(4), new TokenInfo("The", null, "", 20, 23, 5, "5/5", null, false));
-    assertToken(tokenList.get(5), new TokenInfo("Dogs", null, "", 24, 28, 6, "6/6", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("The", null, "", 0, 3, 1, new int[]{1,1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("Fox", null, "", 4, 7, 2, new int[]{2,2}, null, false));
+    assertToken(tokenList.get(2), new TokenInfo("Jumped", null, "", 8, 14, 3, new int[]{3,3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("Over", null, "", 15, 19, 4, new int[]{4,4}, null, false));
+    assertToken(tokenList.get(4), new TokenInfo("The", null, "", 20, 23, 5, new int[]{5,5}, null, false));
+    assertToken(tokenList.get(5), new TokenInfo("Dogs", null, "", 24, 28, 6, new int[]{6,6}, null, false));
     tokenList = valueResult.get("org.apache.lucene.analysis.core.LowerCaseFilter");
     assertNotNull("Expecting the 'LowerCaseFilter' to be applied on the index for the 'text' field", tokenList);
     assertEquals("Expecting 6 tokens", 6, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, "1/1/1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("fox", null, "", 4, 7, 2, "2/2/2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("jumped", null, "", 8, 14, 3, "3/3/3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("over", null, "", 15, 19, 4, "4/4/4", null, false));
-    assertToken(tokenList.get(4), new TokenInfo("the", null, "", 20, 23, 5, "5/5/5", null, false));
-    assertToken(tokenList.get(5), new TokenInfo("dogs", null, "", 24, 28, 6, "6/6/6", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, new int[]{1,1,1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("fox", null, "", 4, 7, 2, new int[]{2,2,2}, null, false));
+    assertToken(tokenList.get(2), new TokenInfo("jumped", null, "", 8, 14, 3, new int[]{3,3,3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("over", null, "", 15, 19, 4, new int[]{4,4,4}, null, false));
+    assertToken(tokenList.get(4), new TokenInfo("the", null, "", 20, 23, 5, new int[]{5,5,5}, null, false));
+    assertToken(tokenList.get(5), new TokenInfo("dogs", null, "", 24, 28, 6, new int[]{6,6,6}, null, false));
     tokenList = valueResult.get("org.apache.lucene.analysis.core.StopFilter");
     assertNotNull("Expecting the 'StopFilter' to be applied on the index for the 'text' field", tokenList);
     assertEquals("Expecting 4 tokens after stop word removal", 4, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 4, 7, 1, "2/2/2/1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("jumped", null, "", 8, 14, 2, "3/3/3/2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("over", null, "", 15, 19, 3, "4/4/4/3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("dogs", null, "", 24, 28, 4, "6/6/6/4", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 4, 7, 1, new int[]{2,2,2,1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("jumped", null, "", 8, 14, 2, new int[]{3,3,3,2}, null, false));
+    assertToken(tokenList.get(2), new TokenInfo("over", null, "", 15, 19, 3, new int[]{4,4,4,3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("dogs", null, "", 24, 28, 4, new int[]{6,6,6,4}, null, false));
     tokenList = valueResult.get("org.apache.lucene.analysis.en.PorterStemFilter");
     assertNotNull("Expecting the 'PorterStemFilter' to be applied on the index for the 'text' field", tokenList);
     assertEquals("Expecting 4 tokens", 4, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 4, 7, 1, "2/2/2/1/1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("jump", null, "", 8, 14, 2, "3/3/3/2/2", null, true));
-    assertToken(tokenList.get(2), new TokenInfo("over", null, "", 15, 19, 3, "4/4/4/3/3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("dog", null, "", 24, 28, 4, "6/6/6/4/4", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 4, 7, 1, new int[]{2,2,2,1,1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("jump", null, "", 8, 14, 2, new int[]{3,3,3,2,2}, null, true));
+    assertToken(tokenList.get(2), new TokenInfo("over", null, "", 15, 19, 3, new int[]{4,4,4,3,3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("dog", null, "", 24, 28, 4, new int[]{6,6,6,4,4}, null, false));
   }
 }

diff --git a/solr/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java b/solr/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java
index 400b1020130..c58552190da 100644
--- a/solr/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java
+++ b/solr/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java
@@ -139,64 +139,64 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB
     List tokenList = indexPart.get("org.apache.lucene.analysis.standard.StandardTokenizer");
     assertNotNull("Expcting StandardTokenizer analysis breakdown", tokenList);
     assertEquals(tokenList.size(), 10);
-    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, "1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, "2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, "3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, "4", null, true));
-    assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, "5", null, false));
-    assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, "6", null, false));
-    assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, "7", null, false));
-    assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, "8", null, false));
-    assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, "9", null, true));
-    assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, "10", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, new int[]{1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, new int[]{2}, null, false));
+    assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, new int[]{3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, new int[]{4}, null, true));
+    assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, new int[]{5}, null, false));
+    assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, new int[]{6}, null, false));
+    assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, new int[]{7}, null, false));
+    assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, new int[]{8}, null, false));
+    assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9}, null, true));
+    assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, new int[]{10}, null, false));
     tokenList = indexPart.get("org.apache.lucene.analysis.standard.StandardFilter");
     assertNotNull("Expcting StandardFilter analysis breakdown", tokenList);
     assertEquals(tokenList.size(), 10);
-    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, "1/1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, "2/2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, "3/3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, "4/4", null, true));
-    assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, "5/5", null, false));
-    assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, "6/6", null, false));
-    assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, "7/7", null, false));
-    assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, "8/8", null, false));
-    assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, "9/9", null, true));
-    assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, "10/10", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, new int[]{1,1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, new int[]{2,2}, null, false));
+    assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, new int[]{3,3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, new int[]{4,4}, null, true));
+    assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, new int[]{5,5}, null, false));
+    assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, new int[]{6,6}, null, false));
+    assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, new int[]{7,7}, null, false));
+    assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, new int[]{8,8}, null, false));
+    assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9,9}, null, true));
+    assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, new int[]{10,10}, null, false));
     tokenList = indexPart.get("org.apache.lucene.analysis.core.LowerCaseFilter");
     assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList);
     assertEquals(tokenList.size(), 10);
-    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, "1/1/1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, "2/2/2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, "3/3/3", null, false));
-    assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, "4/4/4", null, true));
-    assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, "5/5/5", null, false));
-    assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, "6/6/6", null, false));
-    assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, "7/7/7", null, false));
-    assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, "8/8/8", null, false));
-    assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, "9/9/9", null, true));
-    assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, "10/10/10", null, false));
+    assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, new int[]{1,1,1}, null, false));
+    assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, new int[]{2,2,2}, null, false));
+    assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, new int[]{3,3,3}, null, false));
+    assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, new int[]{4,4,4}, null, true));
+    assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, new int[]{5,5,5}, null, false));
+    assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, new int[]{6,6,6}, null, false));
+    assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, new int[]{7,7,7}, null, false));
+    assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, new int[]{8,8,8}, null, false));
+    assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9,9,9}, null, true));
+    assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, new int[]{10,10,10}, null, false));
     tokenList = indexPart.get("org.apache.lucene.analysis.core.StopFilter");
     assertNotNull("Expcting StopFilter analysis breakdown", tokenList);
     assertEquals(tokenList.size(), 8);
-    assertToken(tokenList.get(0), new TokenInfo("quick", null, "", 4, 9, 1, "2/2/2/1", null, false));
-    assertToken(tokenList.get(1), new TokenInfo("red", null, "", 10, 13, 2, "3/3/3/2", null, false));
-    assertToken(tokenList.get(2), new TokenInfo("fox", null, "", 14, 17, 3, "4/4/4/3", null, true));
-    assertToken(tokenList.get(3), new TokenInfo("jumped", null, "", 18, 24, 4, "5/5/5/4", null, false));
-    assertToken(tokenList.get(4), new TokenInfo("over", null, "", 25, 29, 5, "6/6/6/5", null, false));
-    assertToken(tokenList.get(5), new TokenInfo("lazy", null, "", 34, 38, 6, "8/8/8/6", null, false));
"8/8/8/6", null, false)); - assertToken(tokenList.get(6), new TokenInfo("brown", null, "", 39, 44, 7, "9/9/9/7", null, true)); - assertToken(tokenList.get(7), new TokenInfo("dogs", null, "", 45, 49, 8, "10/10/10/8", null, false)); + assertToken(tokenList.get(0), new TokenInfo("quick", null, "", 4, 9, 1, new int[]{2,2,2,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("red", null, "", 10, 13, 2, new int[]{3,3,3,2}, null, false)); + assertToken(tokenList.get(2), new TokenInfo("fox", null, "", 14, 17, 3, new int[]{4,4,4,3}, null, true)); + assertToken(tokenList.get(3), new TokenInfo("jumped", null, "", 18, 24, 4, new int[]{5,5,5,4}, null, false)); + assertToken(tokenList.get(4), new TokenInfo("over", null, "", 25, 29, 5, new int[]{6,6,6,5}, null, false)); + assertToken(tokenList.get(5), new TokenInfo("lazy", null, "", 34, 38, 6, new int[]{8,8,8,6}, null, false)); + assertToken(tokenList.get(6), new TokenInfo("brown", null, "", 39, 44, 7, new int[]{9,9,9,7}, null, true)); + assertToken(tokenList.get(7), new TokenInfo("dogs", null, "", 45, 49, 8, new int[]{10,10,10,8}, null, false)); tokenList = indexPart.get("org.apache.lucene.analysis.en.PorterStemFilter"); assertNotNull("Expcting PorterStemFilter analysis breakdown", tokenList); assertEquals(tokenList.size(), 8); - assertToken(tokenList.get(0), new TokenInfo("quick", null, "", 4, 9, 1, "2/2/2/1/1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("red", null, "", 10, 13, 2, "3/3/3/2/2", null, false)); - assertToken(tokenList.get(2), new TokenInfo("fox", null, "", 14, 17, 3, "4/4/4/3/3", null, true)); - assertToken(tokenList.get(3), new TokenInfo("jump", null, "", 18, 24, 4, "5/5/5/4/4", null, false)); - assertToken(tokenList.get(4), new TokenInfo("over", null, "", 25, 29, 5, "6/6/6/5/5", null, false)); - assertToken(tokenList.get(5), new TokenInfo("lazi", null, "", 34, 38, 6, "8/8/8/6/6", null, false)); - assertToken(tokenList.get(6), new TokenInfo("brown", null, "", 39, 44, 7, "9/9/9/7/7", null, true)); - assertToken(tokenList.get(7), new TokenInfo("dog", null, "", 45, 49, 8, "10/10/10/8/8", null, false)); + assertToken(tokenList.get(0), new TokenInfo("quick", null, "", 4, 9, 1, new int[]{2,2,2,1,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("red", null, "", 10, 13, 2, new int[]{3,3,3,2,2}, null, false)); + assertToken(tokenList.get(2), new TokenInfo("fox", null, "", 14, 17, 3, new int[]{4,4,4,3,3}, null, true)); + assertToken(tokenList.get(3), new TokenInfo("jump", null, "", 18, 24, 4, new int[]{5,5,5,4,4}, null, false)); + assertToken(tokenList.get(4), new TokenInfo("over", null, "", 25, 29, 5, new int[]{6,6,6,5,5}, null, false)); + assertToken(tokenList.get(5), new TokenInfo("lazi", null, "", 34, 38, 6, new int[]{8,8,8,6,6}, null, false)); + assertToken(tokenList.get(6), new TokenInfo("brown", null, "", 39, 44, 7, new int[]{9,9,9,7,7}, null, true)); + assertToken(tokenList.get(7), new TokenInfo("dog", null, "", 45, 49, 8, new int[]{10,10,10,8,8}, null, false)); NamedList> queryPart = textType.get("query"); assertNotNull("expecting a query token analysis for field type 'text'", queryPart); @@ -204,28 +204,28 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB tokenList = queryPart.get("org.apache.lucene.analysis.standard.StandardTokenizer"); assertNotNull("Expecting StandardTokenizer analysis breakdown", tokenList); assertEquals("Expecting StandardTokenizer to produce 2 tokens from '" + request.getQuery() + "'", 2, tokenList.size()); - 
assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, "1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, "2", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2}, null, false)); tokenList = queryPart.get("org.apache.lucene.analysis.standard.StandardFilter"); assertNotNull("Expcting StandardFilter analysis breakdown", tokenList); assertEquals(2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, "1/1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, "2/2", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2}, null, false)); tokenList = queryPart.get("org.apache.lucene.analysis.core.LowerCaseFilter"); assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList); assertEquals(2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, "1/1/1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, "2/2/2", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2,2}, null, false)); tokenList = queryPart.get("org.apache.lucene.analysis.core.StopFilter"); assertNotNull("Expcting StopFilter analysis breakdown", tokenList); assertEquals(2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, "1/1/1/1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, "2/2/2/2", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1,1,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2,2,2}, null, false)); tokenList = queryPart.get("org.apache.lucene.analysis.en.PorterStemFilter"); assertNotNull("Expcting PorterStemFilter analysis breakdown", tokenList); assertEquals(2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, "1/1/1/1/1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, "2/2/2/2/2", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1,1,1,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2,2,2,2}, null, false)); NamedList nameTextType = fieldTypes.get("nametext"); assertNotNull("expecting result for field type 'nametext'", nameTextType); @@ -236,22 +236,22 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB tokenList = indexPart.get("org.apache.lucene.analysis.core.WhitespaceTokenizer"); assertNotNull("Expcting WhitespaceTokenizer analysis breakdown", tokenList); assertEquals(10, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, "1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, "2", null, false)); - assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, "3", null, false)); - assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 
17, 4, "4", null, true)); - assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, "5", null, false)); - assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, "6", null, false)); - assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, "7", null, false)); - assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, "8", null, false)); - assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, "9", null, true)); - assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, "10", null, false)); + assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false)); + assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, new int[]{3}, null, false)); + assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[]{4}, null, true)); + assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, new int[]{5}, null, false)); + assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, new int[]{6}, null, false)); + assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, new int[]{7}, null, false)); + assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, new int[]{8}, null, false)); + assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, new int[]{9}, null, true)); + assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, new int[]{10}, null, false)); queryPart = nameTextType.get("query"); assertNotNull("expecting a query token analysis for field type 'nametext'", queryPart); tokenList = queryPart.get(WhitespaceTokenizer.class.getName()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, "1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, "2", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false)); NamedList fieldNames = result.get("field_names"); assertNotNull("field_nameds should never be null", fieldNames); @@ -265,16 +265,16 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB tokenList = indexPart.get(WhitespaceTokenizer.class.getName()); assertNotNull("expecting only WhitespaceTokenizer to be applied", tokenList); assertEquals("expecting WhitespaceTokenizer to produce 10 tokens", 10, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, "1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, "2", null, false)); - assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, "3", null, false)); - assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, "4", null, true)); - assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, "5", null, false)); - assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, "6", null, false)); - assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, "7", null, false)); - assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, "8", null, false)); - 
assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, "9", null, true)); - assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, "10", null, false)); + assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false)); + assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, new int[]{3}, null, false)); + assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[]{4}, null, true)); + assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, new int[]{5}, null, false)); + assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, new int[]{6}, null, false)); + assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, new int[]{7}, null, false)); + assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, new int[]{8}, null, false)); + assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, new int[]{9}, null, true)); + assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, new int[]{10}, null, false)); queryPart = whitetok.get("query"); assertNotNull("expecting a query token analysis for field 'whitetok'", queryPart); @@ -282,8 +282,8 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB tokenList = queryPart.get(WhitespaceTokenizer.class.getName()); assertNotNull("expecting only WhitespaceTokenizer to be applied", tokenList); assertEquals("expecting WhitespaceTokenizer to produce 2 tokens", 2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, "1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, "2", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false)); NamedList keywordtok = fieldNames.get("keywordtok"); assertNotNull("expecting result for field 'keywordtok'", keywordtok); @@ -294,7 +294,7 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB tokenList = indexPart.get(KeywordTokenizer.class.getName()); assertNotNull("expecting only KeywordTokenizer to be applied", tokenList); assertEquals("expecting KeywordTokenizer to produce 1 token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("the quick red fox jumped over the lazy brown dogs", null, "word", 0, 49, 1, "1", null, false)); + assertToken(tokenList.get(0), new TokenInfo("the quick red fox jumped over the lazy brown dogs", null, "word", 0, 49, 1, new int[]{1}, null, false)); queryPart = keywordtok.get("query"); assertNotNull("expecting a query token analysis for field 'keywordtok'", queryPart); @@ -302,7 +302,7 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB tokenList = queryPart.get(KeywordTokenizer.class.getName()); assertNotNull("expecting only KeywordTokenizer to be applied", tokenList); assertEquals("expecting KeywordTokenizer to produce 1 token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox brown", null, "word", 0, 9, 1, "1", null, false)); + assertToken(tokenList.get(0), new TokenInfo("fox brown", null, "word", 0, 9, 1, new int[]{1}, null, false)); } @@ -331,7 +331,7 @@ public 
class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB List tokenList = (List)indexPart.get("org.apache.lucene.analysis.core.WhitespaceTokenizer"); assertNotNull("Expecting WhitespaceTokenizer analysis breakdown", tokenList); assertEquals(tokenList.size(), 1); - assertToken(tokenList.get(0), new TokenInfo("whatever", null, "word", 12, 20, 1, "1", null, false)); + assertToken(tokenList.get(0), new TokenInfo("whatever", null, "word", 12, 20, 1, new int[]{1}, null, false)); } @Test @@ -356,28 +356,28 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB List tokenList = indexPart.get("org.apache.lucene.analysis.core.WhitespaceTokenizer"); assertNotNull("Expcting WhitespaceTokenizer analysis breakdown", tokenList); assertEquals(4, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("hi,", null, "word", 0, 3, 1, "1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("3456-12", null, "word", 4, 11, 2, "2", null, false)); - assertToken(tokenList.get(2), new TokenInfo("a", null, "word", 12, 13, 3, "3", null, false)); - assertToken(tokenList.get(3), new TokenInfo("Test", null, "word", 14, 18, 4, "4", null, false)); - tokenList = indexPart.get("org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter"); + assertToken(tokenList.get(0), new TokenInfo("hi,", null, "word", 0, 3, 1, new int[]{1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("3456-12", null, "word", 4, 11, 2, new int[]{2}, null, false)); + assertToken(tokenList.get(2), new TokenInfo("a", null, "word", 12, 13, 3, new int[]{3}, null, false)); + assertToken(tokenList.get(3), new TokenInfo("Test", null, "word", 14, 18, 4, new int[]{4}, null, false)); + tokenList = indexPart.get("org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter"); assertNotNull("Expcting WordDelimiterFilter analysis breakdown", tokenList); assertEquals(6, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, "1/1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("3456", null, "word", 4, 8, 2, "2/2", null, false)); - assertToken(tokenList.get(2), new TokenInfo("12", null, "word", 9, 11, 3, "2/3", null, false)); - assertToken(tokenList.get(3), new TokenInfo("345612", null, "word", 4, 11, 3, "2/3", null, false)); - assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, "3/4", null, false)); - assertToken(tokenList.get(5), new TokenInfo("Test", null, "word", 14, 18, 5, "4/5", null, false)); + assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, new int[]{1,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("3456", null, "word", 4, 8, 2, new int[]{2,2}, null, false)); + assertToken(tokenList.get(2), new TokenInfo("12", null, "word", 9, 11, 3, new int[]{2,3}, null, false)); + assertToken(tokenList.get(3), new TokenInfo("345612", null, "word", 4, 11, 3, new int[]{2,3}, null, false)); + assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, new int[]{3,4}, null, false)); + assertToken(tokenList.get(5), new TokenInfo("Test", null, "word", 14, 18, 5, new int[]{4,5}, null, false)); tokenList = indexPart.get("org.apache.lucene.analysis.core.LowerCaseFilter"); assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList); assertEquals(6, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, "1/1/1", null, false)); - assertToken(tokenList.get(1), new TokenInfo("3456", null, "word", 4, 8, 2, "2/2/2", null, false)); - 
assertToken(tokenList.get(2), new TokenInfo("12", null, "word", 9, 11, 3, "2/3/3", null, false)); - assertToken(tokenList.get(3), new TokenInfo("345612", null, "word", 4, 11, 3, "2/3/3", null, false)); - assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, "3/4/4", null, false)); - assertToken(tokenList.get(5), new TokenInfo("test", null, "word", 14, 18, 5, "4/5/5", null, false)); + assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, new int[]{1,1,1}, null, false)); + assertToken(tokenList.get(1), new TokenInfo("3456", null, "word", 4, 8, 2, new int[]{2,2,2}, null, false)); + assertToken(tokenList.get(2), new TokenInfo("12", null, "word", 9, 11, 3, new int[]{2,3,3}, null, false)); + assertToken(tokenList.get(3), new TokenInfo("345612", null, "word", 4, 11, 3, new int[]{2,3,3}, null, false)); + assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, new int[]{3,4,4}, null, false)); + assertToken(tokenList.get(5), new TokenInfo("test", null, "word", 14, 18, 5, new int[]{4,5,5}, null, false)); } }
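Editor's note: the sketch below is not part of the patch. It is a minimal, standalone illustration of the behavior the patch relies on: how TokenTrackingAttributeImpl accumulates the per-stage position history via freezeStage()/setActPosition()/getPositions(), and how reflectWith() converts the int[] to an Integer[] with commons-lang ArrayUtils so Solr's ResponseWriters can serialize it. The class name and main() method are hypothetical and only for demonstration; the caching the patch adds to getPositions() is omitted for brevity.

import java.util.Arrays;
import org.apache.commons.lang.ArrayUtils;

public class PositionHistorySketch {
  private int[] basePositions = new int[0]; // positions frozen from earlier analysis stages
  private int position = 0;                 // position of the token within the current stage

  // Called when analysis moves to the next stage (tokenizer -> filter -> filter ...).
  void freezeStage() { basePositions = getPositions(); position = 0; }

  void setActPosition(int pos) { position = pos; }

  // Full history: all previous stages' positions plus the current one.
  int[] getPositions() { return ArrayUtils.add(basePositions, position); }

  public static void main(String[] args) {
    PositionHistorySketch attr = new PositionHistorySketch();
    attr.setActPosition(2);   // e.g. "fox" is the 2nd token emitted by the tokenizer
    attr.freezeStage();       // next stage, e.g. a stop filter that removed "the"
    attr.setActPosition(1);   // "fox" is now the 1st token
    // After the patch, a response writer receives an Integer[] rather than a "/"-joined string.
    Integer[] history = ArrayUtils.toObject(attr.getPositions());
    System.out.println(Arrays.toString(history)); // prints [2, 1]
  }
}

With each additional stage the array grows by one entry, which is the same pattern the updated tests assert with literals such as new int[]{2,2,2,1}.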