[OLINGO-568] Refactored SearchParser for further development

mibo 2015-11-12 23:08:35 +01:00
parent a8d63fbee2
commit 40962a9a18
4 changed files with 105 additions and 65 deletions


@@ -21,23 +21,23 @@ package org.apache.olingo.server.core.uri.parser.search;
import org.apache.olingo.server.api.uri.queryoption.SearchOption;
import org.apache.olingo.server.api.uri.queryoption.search.SearchBinaryOperatorKind;
import org.apache.olingo.server.api.uri.queryoption.search.SearchExpression;
import org.apache.olingo.server.api.uri.queryoption.search.SearchTerm;
import org.apache.olingo.server.core.uri.queryoption.SearchOptionImpl;
import java.util.Iterator;
import java.util.List;
public class SearchParser {
protected Iterator<SearchQueryToken> tokens;
protected SearchExpression root;
// private SearchQueryToken currentToken;
private Iterator<SearchQueryToken> tokens;
private SearchExpression root;
private SearchQueryToken token;
public SearchOption parse(String path, String value) {
SearchTokenizer tokenizer = new SearchTokenizer();
try {
tokens = tokenizer.tokenize(value).iterator();
// currentToken = tokens.next();
root = processTokens();
nextToken();
root = processSearchExpression(null);
} catch (SearchTokenizerException e) {
return null;
}
@@ -46,81 +46,98 @@ public class SearchParser {
return searchOption;
}
protected SearchExpression processTokens() {
SearchQueryToken token = nextToken();
protected SearchExpression parseInternal(List<SearchQueryToken> tokens) {
this.tokens = tokens.iterator();
nextToken();
return processSearchExpression(null);
}
private SearchExpression processSearchExpression(SearchExpression left) {
if(token == null) {
return left;
}
if(token.getToken() == SearchQueryToken.Token.OPEN) {
processOpen();
throw illegalState();
} else if(token.getToken() == SearchQueryToken.Token.CLOSE) {
processClose();
throw illegalState();
} else if(token.getToken() == SearchQueryToken.Token.NOT) {
return processNot();
processNot();
} else if(token.getToken() == SearchQueryToken.Token.PHRASE ||
token.getToken() == SearchQueryToken.Token.WORD) {
return processTerm(token);
// } else if(token.getToken() == SearchQueryToken.Token.AND) {
// return processAnd();
return processSearchExpression(processTerm());
} else if(token.getToken() == SearchQueryToken.Token.AND) {
SearchExpression se = processAnd(left);
return processSearchExpression(se);
} else if(token.getToken() == SearchQueryToken.Token.OR) {
return processOr(left);
} else {
throw illegalState();
}
}
private SearchExpression processAnd(SearchExpression se) {
SearchQueryToken token = nextToken();
if(token.getToken() == SearchQueryToken.Token.PHRASE ||
token.getToken() == SearchQueryToken.Token.WORD) {
// SearchExpression t = processTerm(token);
return new SearchBinaryImpl(se, SearchBinaryOperatorKind.AND, processTerm(token));
}
throw illegalState();
}
private SearchExpression processOr(SearchExpression se) {
SearchQueryToken token = nextToken();
if(token.getToken() == SearchQueryToken.Token.PHRASE ||
token.getToken() == SearchQueryToken.Token.WORD) {
return new SearchBinaryImpl(se, SearchBinaryOperatorKind.OR, processTerm(token));
private void processClose() {
nextToken();
}
throw illegalState();
private void processOpen() {
nextToken();
}
private SearchExpression processAnd(SearchExpression left) {
nextToken();
SearchExpression se = processTerm();
return new SearchBinaryImpl(left, SearchBinaryOperatorKind.AND, se);
}
public SearchExpression processOr(SearchExpression left) {
nextToken();
SearchExpression se = processSearchExpression(left);
return new SearchBinaryImpl(left, SearchBinaryOperatorKind.OR, se);
}
private RuntimeException illegalState() {
return new RuntimeException();
}
private SearchUnaryImpl processNot() {
SearchQueryToken token = nextToken();
if(token.getToken() == SearchQueryToken.Token.PHRASE ||
token.getToken() == SearchQueryToken.Token.WORD) {
throw illegalState();
// return new SearchUnaryImpl(processTerm(token));
}
throw illegalState();
private void processNot() {
nextToken();
}
private SearchQueryToken nextToken() {
// if(tokens.hasNext()) {
return tokens.next();
// }
private void nextToken() {
if(tokens.hasNext()) {
token = tokens.next();
} else {
token = null;
}
// return null;
}
private SearchExpression processTerm(SearchQueryToken token) {
SearchTerm searchTerm = new SearchTermImpl(token.getLiteral());
if(isEof()) {
return searchTerm;
private SearchExpression processTerm() {
if(token.getToken() == SearchQueryToken.Token.NOT) {
return new SearchUnaryImpl(processPhrase());
}
if(token.getToken() == SearchQueryToken.Token.PHRASE) {
return processPhrase();
}
if(token.getToken() == SearchQueryToken.Token.WORD) {
return processWord();
}
return null;
}
SearchQueryToken next = nextToken();
if(next.getToken() == SearchQueryToken.Token.AND) {
return processAnd(searchTerm);
} else if(next.getToken() == SearchQueryToken.Token.OR) {
return processOr(searchTerm);
private SearchTermImpl processWord() {
String literal = token.getLiteral();
nextToken();
return new SearchTermImpl(literal);
}
throw illegalState();
}
private boolean isEof() {
return !tokens.hasNext();
private SearchTermImpl processPhrase() {
String literal = token.getLiteral();
nextToken();
return new SearchTermImpl(literal);
}
}

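After this refactoring the parser keeps a single lookahead token in the private token field and advances it with nextToken(), so each process* method only inspects the current token and recurses through processSearchExpression. A minimal usage sketch of the public entry point, assuming nothing beyond the classes shown above (the query string and the expected rendering are taken from the tests further down; the path argument is passed as null, as those tests do):

SearchParser parser = new SearchParser();
// Only the $search value is tokenized and parsed here; the tests pass null for path.
SearchOption option = parser.parse(null, "a AND b AND c");
SearchExpression root = option.getSearchExpression();
// AND chains associate to the left, so toString() renders {{'a' AND 'b'} AND 'c'}.
System.out.println(root);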

@@ -34,12 +34,15 @@ public class SearchParserAndTokenizerTest {
@Test
public void basicParsing() throws SearchTokenizerException {
// SearchExpressionValidator.init("a AND b OR c").enableLogging()
// .validate(with("a"));
SearchExpressionValidator.init("a")
.validate(with("a"));
SearchExpressionValidator.init("a AND b")
.validate(with("a", and("b")));
SearchExpressionValidator.init("a AND b AND c")
.validate(with("a", and("b", and("c"))));
.validate("{{'a' AND 'b'} AND 'c'}");
SearchExpressionValidator.init("a OR b")
.validate(with("a", or("b")));
SearchExpressionValidator.init("a OR b OR c")
@@ -47,10 +50,11 @@ public class SearchParserAndTokenizerTest {
}
@Test
@Ignore("Currently not working")
public void mixedParsing() throws Exception {
SearchExpressionValidator.init("a AND b OR c")
.validate(with("c", or("a", and("b"))));
.validate("{{'a' AND 'b'} OR 'c'}");
SearchExpressionValidator.init("a OR b AND c")
.validate("{'a' OR {'b' AND 'c'}}");
}
@Ignore
@@ -156,15 +160,25 @@ public class SearchParserAndTokenizerTest {
}
private void validate(SearchExpression expectedSearchExpression) throws SearchTokenizerException {
final SearchExpression searchExpression = getSearchExpression();
Assert.assertEquals(expectedSearchExpression.toString(), searchExpression.toString());
}
private void validate(String expectedSearchExpression) throws SearchTokenizerException {
final SearchExpression searchExpression = getSearchExpression();
Assert.assertEquals(expectedSearchExpression, searchExpression.toString());
}
private SearchExpression getSearchExpression() {
SearchParser tokenizer = new SearchParser();
SearchOption result = tokenizer.parse(null, searchQuery);
Assert.assertNotNull(result);
final SearchExpression searchExpression = result.getSearchExpression();
Assert.assertNotNull(searchExpression);
if (log) {
System.out.println(expectedSearchExpression);
System.out.println(searchExpression);
}
Assert.assertEquals(expectedSearchExpression.toString(), searchExpression.toString());
return searchExpression;
}
}

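The test class now supports both validation flavours: the existing comparison against a hand-built expression tree and the new comparison against the serialized tree. A short sketch of the two side by side (inputs and expected values mirror the basicParsing expectations above):

SearchExpressionValidator.init("a AND b")
    .validate(with("a", and("b")));          // expression-tree comparison
SearchExpressionValidator.init("a AND b AND c")
    .validate("{{'a' AND 'b'} AND 'c'}");    // string comparison against toString()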

@@ -145,11 +145,20 @@ public class SearchParserTest extends SearchParser {
assertEquals("{{'word1' AND 'word2'} AND 'word3'}", se.toString());
}
@Test
public void combinationAndOr() {
//word1 AND word2 OR word3
SearchExpression se = run(Token.WORD, Token.AND, Token.WORD, Token.OR, Token.WORD);
assertEquals("{{'word1' AND 'word2'} OR 'word3'}", se.toString());
//word1 OR word2 AND word3
se = run(Token.WORD, Token.OR, Token.WORD, Token.AND, Token.WORD);
assertEquals("{'word1' OR {'word2' AND 'word3'}}", se.toString());
}
private SearchExpression run(SearchQueryToken.Token... tokenArray) {
List<SearchQueryToken> tokenList = prepareTokens(tokenArray);
tokens = tokenList.iterator();
SearchExpression se = processTokens();
SearchExpression se = parseInternal(tokenList);
assertNotNull(se);
return se;
}
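The protected parseInternal(List<SearchQueryToken>) hook lets this token-level test feed a prepared token list straight into the parser instead of assigning the formerly protected iterator field. A sketch following the run helper above (prepareTokens and the word1..wordN literals are the ones this test class already uses):

List<SearchQueryToken> tokenList = prepareTokens(Token.WORD, Token.AND, Token.WORD, Token.OR, Token.WORD);
SearchExpression se = parseInternal(tokenList);
assertEquals("{{'word1' AND 'word2'} OR 'word3'}", se.toString());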