mirror of https://github.com/apache/commons-lang.git
Add method to get all tokens as a list as well as an array
git-svn-id: https://svn.apache.org/repos/asf/jakarta/commons/proper/lang/trunk@424596 13f79535-47bb-0310-9956-ffa450edef68
commit b7b7d7c935
parent 9cbf70d822
StrTokenizer.java
@@ -446,15 +446,29 @@ public String previousToken() {
     }
 
     /**
-     * Gets a copy of the full token list.
+     * Gets a copy of the full token list as an independent modifiable array.
      *
      * @return the tokens as a String array
      */
-    public String[] getAllTokens() {
+    public String[] getTokenArray() {
         tokenize();
         return (String[]) tokens.clone();
     }
 
+    /**
+     * Gets a copy of the full token list as an independent modifiable list.
+     *
+     * @return the tokens as a String list
+     */
+    public List getTokenList() {
+        tokenize();
+        List list = new ArrayList(tokens.length);
+        for (int i = 0; i < tokens.length; i++) {
+            list.add(tokens[i]);
+        }
+        return list;
+    }
+
     /**
      * Resets this tokenizer, forgetting all parsing and iteration already completed.
      * <p>
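For reference, a minimal usage sketch of the two accessors added above (not part of the commit; the class name TokenizerUsage is illustrative). The plain StrTokenizer(String) constructor splits on whitespace, so "a b c" yields three tokens, and each accessor hands back an independent copy:

    import java.util.List;

    import org.apache.commons.lang.text.StrTokenizer;

    // Illustrative sketch: exercise the new accessors on a whitespace-delimited string.
    public class TokenizerUsage {
        public static void main(String[] args) {
            StrTokenizer tok = new StrTokenizer("a b c");

            String[] array = tok.getTokenArray();   // copy of the tokens as an array
            List list = tok.getTokenList();         // copy of the tokens as a raw List (pre-generics API)

            System.out.println(array.length);       // prints 3
            System.out.println(list);               // prints [a, b, c]
        }
    }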
StrTokenizerTest.java
@@ -16,6 +16,8 @@
 
 package org.apache.commons.lang.text;
 
+import java.util.Arrays;
+import java.util.List;
 import java.util.NoSuchElementException;
 
 import junit.framework.Test;
@@ -70,7 +72,7 @@ public void test1() {
         tok.setQuoteChar('"');
         tok.setIgnoredMatcher(StrMatcher.trimMatcher());
         tok.setIgnoreEmptyTokens(false);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "b", "c", "d;\"e", "f", "", "", "",};
 
@@ -90,7 +92,7 @@ public void test2() {
         tok.setQuoteChar('"');
         tok.setIgnoredMatcher(StrMatcher.noneMatcher());
         tok.setIgnoreEmptyTokens(false);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "b", "c ", "d;\"e", "f", " ", " ", "",};
 
@@ -110,7 +112,7 @@ public void test3() {
         tok.setQuoteChar('"');
         tok.setIgnoredMatcher(StrMatcher.noneMatcher());
         tok.setIgnoreEmptyTokens(false);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "b", " c", "d;\"e", "f", " ", " ", "",};
 
@@ -130,7 +132,7 @@ public void test4() {
         tok.setQuoteChar('"');
         tok.setIgnoredMatcher(StrMatcher.trimMatcher());
         tok.setIgnoreEmptyTokens(true);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "b", "c", "d;\"e", "f",};
 
@@ -151,7 +153,7 @@ public void test5() {
         tok.setIgnoredMatcher(StrMatcher.trimMatcher());
         tok.setIgnoreEmptyTokens(false);
         tok.setEmptyTokenAsNull(true);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "b", "c", "d;\"e", "f", null, null, null,};
 
@@ -172,7 +174,7 @@ public void test6() {
         tok.setIgnoredMatcher(StrMatcher.trimMatcher());
         tok.setIgnoreEmptyTokens(false);
         // tok.setTreatingEmptyAsNull(true);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "b", " c", "d;\"e", "f", null, null, null,};
 
@@ -206,7 +208,7 @@ public void test7() {
         tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
         tok.setIgnoredMatcher(StrMatcher.noneMatcher());
         tok.setIgnoreEmptyTokens(false);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "", "", "b", "c", "d e", "f", "",};
 
@@ -226,7 +228,7 @@ public void test8() {
         tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
         tok.setIgnoredMatcher(StrMatcher.noneMatcher());
         tok.setIgnoreEmptyTokens(true);
-        String tokens[] = tok.getAllTokens();
+        String tokens[] = tok.getTokenArray();
 
         String expected[] = new String[]{"a", "b", "c", "d e", "f",};
 
@@ -482,6 +484,17 @@ public void testBasicIgnoreTrimmed4() {
         assertEquals(false, tok.hasNext());
     }
 
+    //-----------------------------------------------------------------------
+    public void testListArray() {
+        String input = "a b c";
+        StrTokenizer tok = new StrTokenizer(input);
+        String[] array = tok.getTokenArray();
+        List list = tok.getTokenList();
+
+        assertEquals(Arrays.asList(array), list);
+        assertEquals(3, list.size());
+    }
+
     //-----------------------------------------------------------------------
     public void testCSV(String data) {
         this.testXSVAbc(StrTokenizer.getCSVInstance(data));
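As a follow-on sanity check (hypothetical, not part of the commit; the class and test names are invented): because getTokenArray() returns tokens.clone() and getTokenList() builds a fresh ArrayList, mutating either returned copy should leave the tokenizer's own token state untouched. A standalone JUnit 3 sketch of that property:

    import java.util.List;

    import junit.framework.TestCase;

    import org.apache.commons.lang.text.StrTokenizer;

    // Hypothetical test: the copies returned by getTokenArray() and
    // getTokenList() are independent of the tokenizer's internal state.
    public class TokenCopyTest extends TestCase {

        public void testTokenCopiesAreIndependent() {
            StrTokenizer tok = new StrTokenizer("a b c");

            String[] array = tok.getTokenArray();
            array[0] = "changed";          // mutate the returned array copy

            List list = tok.getTokenList();
            list.clear();                  // mutate the returned list copy

            // fresh copies from the tokenizer are unaffected by either mutation
            assertEquals("a", tok.getTokenArray()[0]);
            assertEquals(3, tok.getTokenList().size());
        }
    }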