added StreamTokenizer examples

This commit is contained in:
catalin-burcea 2019-07-31 22:29:31 +03:00
parent ba31134cfd
commit 674db8bd0e
3 changed files with 113 additions and 0 deletions

StreamTokenizerDemo.java

@@ -0,0 +1,76 @@
package com.baeldung.string.streamtokenizer;

import java.io.*;
import java.util.ArrayList;
import java.util.List;

public class StreamTokenizerDemo {

    private static final String INPUT_FILE = "/stream-tokenizer-example.txt";
    private static final int QUOTE_CHARACTER = '\'';
    private static final int DOUBLE_QUOTE_CHARACTER = '"';
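
    /**
     * Tokenizes the input with StreamTokenizer's default syntax table:
     * numbers go into nval, words and quoted ('...' or "...") strings into sval,
     * '/' starts a line comment, and end-of-line characters are not reported.
     */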
    public static List<Object> streamTokenizerWithDefaultConfiguration(Reader reader) throws IOException {
        StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
        List<Object> tokens = new ArrayList<>();

        int currentToken = streamTokenizer.nextToken();
        while (currentToken != StreamTokenizer.TT_EOF) {
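            // Numbers are exposed through nval, words and quoted strings through sval;
            // any other character comes back as an ordinary token whose type is the character itself.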
            if (streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
                tokens.add(streamTokenizer.nval);
            } else if (streamTokenizer.ttype == StreamTokenizer.TT_WORD
                    || streamTokenizer.ttype == QUOTE_CHARACTER
                    || streamTokenizer.ttype == DOUBLE_QUOTE_CHARACTER) {
                tokens.add(streamTokenizer.sval);
            } else {
                tokens.add((char) currentToken);
            }
            currentToken = streamTokenizer.nextToken();
        }

        return tokens;
    }
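
    /**
     * Tokenizes the input after customizing the syntax table
     * (word characters, comment character and end-of-line handling).
     */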
    public static List<Object> streamTokenizerWithCustomConfiguration(Reader reader) throws IOException {
        StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
        List<Object> tokens = new ArrayList<>();
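
        // Treat every character from '!' (33) to '-' (45) as part of a word,
        // stop treating '/' as a comment starter, start comments with '#' instead,
        // and report end-of-line characters as tokens.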
        streamTokenizer.wordChars('!', '-');
        streamTokenizer.ordinaryChar('/');
        streamTokenizer.commentChar('#');
        streamTokenizer.eolIsSignificant(true);

        int currentToken = streamTokenizer.nextToken();
        while (currentToken != StreamTokenizer.TT_EOF) {
            if (streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
                tokens.add(streamTokenizer.nval);
            } else if (streamTokenizer.ttype == StreamTokenizer.TT_WORD
                    || streamTokenizer.ttype == QUOTE_CHARACTER
                    || streamTokenizer.ttype == DOUBLE_QUOTE_CHARACTER) {
                tokens.add(streamTokenizer.sval);
            } else {
                tokens.add((char) currentToken);
            }
            currentToken = streamTokenizer.nextToken();
        }

        return tokens;
    }
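
    /**
     * Opens the sample input file bundled as a classpath resource.
     */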
    public static Reader createReaderFromFile() throws FileNotFoundException {
        String inputFile = StreamTokenizerDemo.class.getResource(INPUT_FILE).getFile();
        return new FileReader(inputFile);
    }
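
    /**
     * Prints the tokens produced by both configurations for the sample file.
     */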
    public static void main(String[] args) throws IOException {
        List<Object> tokens1 = streamTokenizerWithDefaultConfiguration(createReaderFromFile());
        List<Object> tokens2 = streamTokenizerWithCustomConfiguration(createReaderFromFile());

        System.out.println(tokens1);
        System.out.println(tokens2);
    }
}

stream-tokenizer-example.txt

@@ -0,0 +1,3 @@
3 quick brown foxes jump over the "lazy" dog!
#test1
//test2

StreamTokenizerDemoUnitTest.java

@@ -0,0 +1,34 @@
package com.baeldung.string.streamtokenizer;

import org.junit.Test;

import java.io.IOException;
import java.io.Reader;
import java.util.Arrays;
import java.util.List;

import static org.junit.Assert.assertArrayEquals;

public class StreamTokenizerDemoUnitTest {
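
    // Default syntax table: the quoted word comes back without its quotes, '!' and '#'
    // are returned as single-character tokens, and the //test2 line is skipped as a comment.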
    @Test
    public void whenStreamTokenizerWithDefaultConfigurationIsCalled_ThenCorrectTokensAreReturned() throws IOException {
        Reader reader = StreamTokenizerDemo.createReaderFromFile();
        List<Object> expectedTokens = Arrays.asList(3.0, "quick", "brown", "foxes", "jump", "over", "the", "lazy", "dog", '!', '#', "test1");
        List<Object> actualTokens = StreamTokenizerDemo.streamTokenizerWithDefaultConfiguration(reader);

        assertArrayEquals(expectedTokens.toArray(), actualTokens.toArray());
    }
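
    // Custom syntax table: '"' and '!' are word characters, so "lazy" keeps its quotes
    // and "dog!" stays intact; line ends are reported as '\n' tokens, the #test1 line
    // is skipped as a comment, and each '/' is an ordinary character token.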
    @Test
    public void whenStreamTokenizerWithCustomConfigurationIsCalled_ThenCorrectTokensAreReturned() throws IOException {
        Reader reader = StreamTokenizerDemo.createReaderFromFile();
        List<Object> expectedTokens = Arrays.asList(3.0, "quick", "brown", "foxes", "jump", "over", "the", "\"lazy\"", "dog!", '\n', '\n', '/', '/', "test2");
        List<Object> actualTokens = StreamTokenizerDemo.streamTokenizerWithCustomConfiguration(reader);

        assertArrayEquals(expectedTokens.toArray(), actualTokens.toArray());
    }
}