import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StreamTokenizer;
import java.util.ArrayList;
import java.util.List;

/**
 * Demonstrates {@link StreamTokenizer} with its default configuration and
 * with a customized one (extra word characters, '/' demoted from comment
 * char to ordinary char, '#' comments, significant end-of-line tokens).
 */
public class StreamTokenizerDemo {

    // Classpath resource tokenized by main(); leading '/' = absolute resource path.
    private static final String INPUT_FILE = "/stream-tokenizer-example.txt";
    private static final int QUOTE_CHARACTER = '\'';
    private static final int DOUBLE_QUOTE_CHARACTER = '"';

    /**
     * Tokenizes the stream using StreamTokenizer's default settings
     * (numbers parsed, '/' starts a comment, quote chars ' and ").
     *
     * @param reader source of characters; caller is responsible for closing it
     * @return tokens in order: Double for numbers, String for words and quoted
     *         strings, Character for any other single character
     * @throws IOException if reading from the stream fails
     */
    public static List<Object> streamTokenizerWithDefaultConfiguration(Reader reader) throws IOException {
        StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
        return collectTokens(streamTokenizer);
    }

    /**
     * Tokenizes the stream with a customized syntax table: characters '!'..'-'
     * become word characters, '/' is an ordinary character (so "//" is NOT a
     * comment), '#' starts a comment, and end-of-line is reported as a token.
     *
     * @param reader source of characters; caller is responsible for closing it
     * @return tokens in order (Double / String / Character, '\n' for EOL)
     * @throws IOException if reading from the stream fails
     */
    public static List<Object> streamTokenizerWithCustomConfiguration(Reader reader) throws IOException {
        StreamTokenizer streamTokenizer = new StreamTokenizer(reader);

        streamTokenizer.wordChars('!', '-');
        streamTokenizer.ordinaryChar('/');
        streamTokenizer.commentChar('#');
        streamTokenizer.eolIsSignificant(true);

        return collectTokens(streamTokenizer);
    }

    /**
     * Drains the tokenizer, mapping each token to Double (TT_NUMBER),
     * String (TT_WORD or quoted) or Character (anything else).
     * Extracted because the loop was duplicated verbatim in both public methods.
     */
    private static List<Object> collectTokens(StreamTokenizer streamTokenizer) throws IOException {
        List<Object> tokens = new ArrayList<>();

        int currentToken = streamTokenizer.nextToken();
        while (currentToken != StreamTokenizer.TT_EOF) {
            if (streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
                tokens.add(streamTokenizer.nval);
            } else if (streamTokenizer.ttype == StreamTokenizer.TT_WORD
                    || streamTokenizer.ttype == QUOTE_CHARACTER
                    || streamTokenizer.ttype == DOUBLE_QUOTE_CHARACTER) {
                tokens.add(streamTokenizer.sval);
            } else {
                tokens.add((char) currentToken);
            }
            currentToken = streamTokenizer.nextToken();
        }

        return tokens;
    }

    /**
     * Opens a fresh reader over the example resource file.
     * NOTE(review): FileReader uses the platform default charset pre-Java 18 —
     * fine for this ASCII sample, but specify a charset for real input.
     *
     * @return a new Reader; the caller must close it
     * @throws FileNotFoundException if the resource cannot be opened
     */
    public static Reader createReaderFromFile() throws FileNotFoundException {
        String inputFile = StreamTokenizerDemo.class.getResource(INPUT_FILE).getFile();
        return new FileReader(inputFile);
    }

    public static void main(String[] args) throws IOException {
        // try-with-resources: the original leaked both FileReaders.
        try (Reader reader = createReaderFromFile()) {
            System.out.println(streamTokenizerWithDefaultConfiguration(reader));
        }
        try (Reader reader = createReaderFromFile()) {
            System.out.println(streamTokenizerWithCustomConfiguration(reader));
        }
    }
}
+#test1 +//test2 \ No newline at end of file diff --git a/java-strings-2/src/test/java/com/baeldung/string/streamtokenizer/StreamTokenizerDemoUnitTest.java b/java-strings-2/src/test/java/com/baeldung/string/streamtokenizer/StreamTokenizerDemoUnitTest.java new file mode 100644 index 0000000000..01091ec629 --- /dev/null +++ b/java-strings-2/src/test/java/com/baeldung/string/streamtokenizer/StreamTokenizerDemoUnitTest.java @@ -0,0 +1,34 @@ +package com.baeldung.string.streamtokenizer; + +import org.junit.Test; + +import java.io.IOException; +import java.io.Reader; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.assertArrayEquals; + +public class StreamTokenizerDemoUnitTest { + + @Test + public void whenStreamTokenizerWithDefaultConfigurationIsCalled_ThenCorrectTokensAreReturned() throws IOException { + Reader reader = StreamTokenizerDemo.createReaderFromFile(); + List expectedTokens = Arrays.asList(3.0, "quick", "brown", "foxes", "jump", "over", "the", "lazy", "dog", '!', '#', "test1"); + + List actualTokens = StreamTokenizerDemo.streamTokenizerWithDefaultConfiguration(reader); + + assertArrayEquals(expectedTokens.toArray(), actualTokens.toArray()); + } + + @Test + public void whenStreamTokenizerWithCustomConfigurationIsCalled_ThenCorrectTokensAreReturned() throws IOException { + Reader reader = StreamTokenizerDemo.createReaderFromFile(); + List expectedTokens = Arrays.asList(3.0, "quick", "brown", "foxes", "jump", "over", "the", "\"lazy\"", "dog!", '\n', '\n', '/', '/', "test2"); + + List actualTokens = StreamTokenizerDemo.streamTokenizerWithCustomConfiguration(reader); + + assertArrayEquals(expectedTokens.toArray(), actualTokens.toArray()); + } + +}