package com.baeldung.stringtokenizer;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.StringTokenizer;
import java.util.stream.Collectors;

public class MyTokenizer {
    // Splits the input on commas using the classic hasMoreElements/nextToken loop
    public List<String> getTokens(String str) {
        List<String> tokens = new ArrayList<>();
        // StringTokenizer tokenizer = new StringTokenizer(str);            // default delimiters: whitespace
        StringTokenizer tokenizer = new StringTokenizer(str, ",");
        // StringTokenizer tokenizer = new StringTokenizer(str, ",", true); // returnDelims = true keeps the commas as tokens
        while (tokenizer.hasMoreElements()) {
            tokens.add(tokenizer.nextToken());
            // tokens.add(tokenizer.nextToken("e"));                        // switches the delimiter set mid-iteration
        }
        return tokens;
    }
    // Same split, expressed via Collections.list(Enumeration) and the Stream API
    public List<String> getTokensWithCollection(String str) {
        return Collections.list(new StringTokenizer(str, ",")).stream()
            .map(token -> (String) token)
            .collect(Collectors.toList());
    }
    // Reads a file from the classpath and tokenizes every line with the given delimiter
    public List<String> getTokensFromFile(String path, String delim) {
        List<String> tokens = new ArrayList<>();
        String currLine;
        StringTokenizer tokenizer;
        try (BufferedReader br = new BufferedReader(new InputStreamReader(MyTokenizer.class.getResourceAsStream("/" + path)))) {
            while ((currLine = br.readLine()) != null) {
                tokenizer = new StringTokenizer(currLine, delim);
                while (tokenizer.hasMoreElements()) {
                    tokens.add(tokenizer.nextToken());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return tokens;
    }
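
    // Usage sketch (not part of the original sample): a minimal main method showing how the
    // helpers above might be called; the input string "Welcome,to,baeldung.com" is illustrative only.
    public static void main(String[] args) {
        MyTokenizer myTokenizer = new MyTokenizer();

        // Classic while-loop tokenizing
        System.out.println(myTokenizer.getTokens("Welcome,to,baeldung.com"));               // [Welcome, to, baeldung.com]

        // Collections.list + Stream API variant, same result
        System.out.println(myTokenizer.getTokensWithCollection("Welcome,to,baeldung.com")); // [Welcome, to, baeldung.com]

        // getTokensFromFile(path, delim) additionally expects the named file to be on the classpath
    }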
}