[OLINGO-568] Refactored SearchParser for further development

This commit is contained in:
mibo 2015-11-12 23:08:35 +01:00
parent a8d63fbee2
commit 40962a9a18
4 changed files with 105 additions and 65 deletions

View File

@@ -21,23 +21,23 @@ package org.apache.olingo.server.core.uri.parser.search;
 import org.apache.olingo.server.api.uri.queryoption.SearchOption;
 import org.apache.olingo.server.api.uri.queryoption.search.SearchBinaryOperatorKind;
 import org.apache.olingo.server.api.uri.queryoption.search.SearchExpression;
-import org.apache.olingo.server.api.uri.queryoption.search.SearchTerm;
 import org.apache.olingo.server.core.uri.queryoption.SearchOptionImpl;

 import java.util.Iterator;
+import java.util.List;

 public class SearchParser {

-  protected Iterator<SearchQueryToken> tokens;
-  protected SearchExpression root;
-//  private SearchQueryToken currentToken;
+  private Iterator<SearchQueryToken> tokens;
+  private SearchExpression root;
+  private SearchQueryToken token;

   public SearchOption parse(String path, String value) {
     SearchTokenizer tokenizer = new SearchTokenizer();
     try {
       tokens = tokenizer.tokenize(value).iterator();
-//      currentToken = tokens.next();
-      root = processTokens();
+      nextToken();
+      root = processSearchExpression(null);
     } catch (SearchTokenizerException e) {
       return null;
     }
@@ -46,81 +46,98 @@ public class SearchParser {
     return searchOption;
   }

-  protected SearchExpression processTokens() {
-    SearchQueryToken token = nextToken();
+  protected SearchExpression parseInternal(List<SearchQueryToken> tokens) {
+    this.tokens = tokens.iterator();
+    nextToken();
+    return processSearchExpression(null);
+  }
+
+  private SearchExpression processSearchExpression(SearchExpression left) {
+    if(token == null) {
+      return left;
+    }
     if(token.getToken() == SearchQueryToken.Token.OPEN) {
+      processOpen();
+      throw illegalState();
+    } else if(token.getToken() == SearchQueryToken.Token.CLOSE) {
+      processClose();
       throw illegalState();
     } else if(token.getToken() == SearchQueryToken.Token.NOT) {
-      return processNot();
+      processNot();
     } else if(token.getToken() == SearchQueryToken.Token.PHRASE ||
         token.getToken() == SearchQueryToken.Token.WORD) {
-      return processTerm(token);
-//    } else if(token.getToken() == SearchQueryToken.Token.AND) {
-//      return processAnd();
+      return processSearchExpression(processTerm());
+    } else if(token.getToken() == SearchQueryToken.Token.AND) {
+      SearchExpression se = processAnd(left);
+      return processSearchExpression(se);
+    } else if(token.getToken() == SearchQueryToken.Token.OR) {
+      return processOr(left);
     } else {
       throw illegalState();
     }
-  }
-
-  private SearchExpression processAnd(SearchExpression se) {
-    SearchQueryToken token = nextToken();
-    if(token.getToken() == SearchQueryToken.Token.PHRASE ||
-        token.getToken() == SearchQueryToken.Token.WORD) {
-//      SearchExpression t = processTerm(token);
-      return new SearchBinaryImpl(se, SearchBinaryOperatorKind.AND, processTerm(token));
-    }
     throw illegalState();
   }

-  private SearchExpression processOr(SearchExpression se) {
-    SearchQueryToken token = nextToken();
-    if(token.getToken() == SearchQueryToken.Token.PHRASE ||
-        token.getToken() == SearchQueryToken.Token.WORD) {
-      return new SearchBinaryImpl(se, SearchBinaryOperatorKind.OR, processTerm(token));
-    }
-    throw illegalState();
+  private void processClose() {
+    nextToken();
+  }
+
+  private void processOpen() {
+    nextToken();
+  }
+
+  private SearchExpression processAnd(SearchExpression left) {
+    nextToken();
+    SearchExpression se = processTerm();
+    return new SearchBinaryImpl(left, SearchBinaryOperatorKind.AND, se);
+  }
+
+  public SearchExpression processOr(SearchExpression left) {
+    nextToken();
+    SearchExpression se = processSearchExpression(left);
+    return new SearchBinaryImpl(left, SearchBinaryOperatorKind.OR, se);
   }

   private RuntimeException illegalState() {
     return new RuntimeException();
   }

-  private SearchUnaryImpl processNot() {
-    SearchQueryToken token = nextToken();
-    if(token.getToken() == SearchQueryToken.Token.PHRASE ||
-        token.getToken() == SearchQueryToken.Token.WORD) {
-      throw illegalState();
-//      return new SearchUnaryImpl(processTerm(token));
-    }
-    throw illegalState();
+  private void processNot() {
+    nextToken();
   }

-  private SearchQueryToken nextToken() {
-//    if(tokens.hasNext()) {
-    return tokens.next();
-//    }
+  private void nextToken() {
+    if(tokens.hasNext()) {
+      token = tokens.next();
+    } else {
+      token = null;
+    }
 //    return null;
   }

-  private SearchExpression processTerm(SearchQueryToken token) {
-    SearchTerm searchTerm = new SearchTermImpl(token.getLiteral());
-    if(isEof()) {
-      return searchTerm;
-    }
-    SearchQueryToken next = nextToken();
-    if(next.getToken() == SearchQueryToken.Token.AND) {
-      return processAnd(searchTerm);
-    } else if(next.getToken() == SearchQueryToken.Token.OR) {
-      return processOr(searchTerm);
-    }
-    throw illegalState();
-  }
-
-  private boolean isEof() {
-    return !tokens.hasNext();
+  private SearchExpression processTerm() {
+    if(token.getToken() == SearchQueryToken.Token.NOT) {
+      return new SearchUnaryImpl(processPhrase());
+    }
+    if(token.getToken() == SearchQueryToken.Token.PHRASE) {
+      return processPhrase();
+    }
+    if(token.getToken() == SearchQueryToken.Token.WORD) {
+      return processWord();
+    }
+    return null;
+  }
+
+  private SearchTermImpl processWord() {
+    String literal = token.getLiteral();
+    nextToken();
+    return new SearchTermImpl(literal);
+  }
+
+  private SearchTermImpl processPhrase() {
+    String literal = token.getLiteral();
+    nextToken();
+    return new SearchTermImpl(literal);
   }
 }
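For orientation, a minimal usage sketch of the refactored parser (not part of this commit; the package and API names are taken from the diff above, the sketch class name is made up, and the expected rendering follows the updated tests):

import org.apache.olingo.server.api.uri.queryoption.SearchOption;
import org.apache.olingo.server.api.uri.queryoption.search.SearchExpression;
import org.apache.olingo.server.core.uri.parser.search.SearchParser;

public class SearchParserUsageSketch {
  public static void main(String[] args) {
    SearchParser parser = new SearchParser();
    // parse(path, value): the tests pass null as the path and the raw $search value as value.
    SearchOption option = parser.parse(null, "a OR b AND c");
    SearchExpression expression = option.getSearchExpression();
    // Dispatch per the new code: processSearchExpression(null) reads WORD 'a' via processTerm()/processWord(),
    // recurses with 'a' as the left operand, sees OR and calls processOr('a'), which parses 'b AND c'
    // through processAnd before wrapping the result.
    System.out.println(expression);   // prints {'a' OR {'b' AND 'c'}} per the updated tests
  }
}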

View File

@@ -34,12 +34,15 @@ public class SearchParserAndTokenizerTest {
   @Test
   public void basicParsing() throws SearchTokenizerException {
+//    SearchExpressionValidator.init("a AND b OR c").enableLogging()
+//        .validate(with("a"));
     SearchExpressionValidator.init("a")
         .validate(with("a"));
     SearchExpressionValidator.init("a AND b")
         .validate(with("a", and("b")));
     SearchExpressionValidator.init("a AND b AND c")
-        .validate(with("a", and("b", and("c"))));
+        .validate("{{'a' AND 'b'} AND 'c'}");
     SearchExpressionValidator.init("a OR b")
         .validate(with("a", or("b")));
     SearchExpressionValidator.init("a OR b OR c")
@@ -47,10 +50,11 @@ public class SearchParserAndTokenizerTest {
   }

   @Test
-  @Ignore("Currently not working")
   public void mixedParsing() throws Exception {
     SearchExpressionValidator.init("a AND b OR c")
-        .validate(with("c", or("a", and("b"))));
+        .validate("{{'a' AND 'b'} OR 'c'}");
+    SearchExpressionValidator.init("a OR b AND c")
+        .validate("{'a' OR {'b' AND 'c'}}");
   }

   @Ignore
@@ -156,15 +160,25 @@ public class SearchParserAndTokenizerTest {
     }

     private void validate(SearchExpression expectedSearchExpression) throws SearchTokenizerException {
+      final SearchExpression searchExpression = getSearchExpression();
+      Assert.assertEquals(expectedSearchExpression.toString(), searchExpression.toString());
+    }
+
+    private void validate(String expectedSearchExpression) throws SearchTokenizerException {
+      final SearchExpression searchExpression = getSearchExpression();
+      Assert.assertEquals(expectedSearchExpression, searchExpression.toString());
+    }
+
+    private SearchExpression getSearchExpression() {
       SearchParser tokenizer = new SearchParser();
       SearchOption result = tokenizer.parse(null, searchQuery);
       Assert.assertNotNull(result);
       final SearchExpression searchExpression = result.getSearchExpression();
       Assert.assertNotNull(searchExpression);
       if (log) {
-        System.out.println(expectedSearchExpression);
+        System.out.println(searchExpression);
       }
-      Assert.assertEquals(expectedSearchExpression.toString(), searchExpression.toString());
+      return searchExpression;
     }
   }

View File

@@ -145,11 +145,20 @@ public class SearchParserTest extends SearchParser {
     assertEquals("{{'word1' AND 'word2'} AND 'word3'}", se.toString());
   }

+  @Test
+  public void combinationAndOr() {
+    //word1 AND word2 OR word3
+    SearchExpression se = run(Token.WORD, Token.AND, Token.WORD, Token.OR, Token.WORD);
+    assertEquals("{{'word1' AND 'word2'} OR 'word3'}", se.toString());
+
+    //word1 OR word2 AND word3
+    se = run(Token.WORD, Token.OR, Token.WORD, Token.AND, Token.WORD);
+    assertEquals("{'word1' OR {'word2' AND 'word3'}}", se.toString());
+  }
+
   private SearchExpression run(SearchQueryToken.Token... tokenArray) {
     List<SearchQueryToken> tokenList = prepareTokens(tokenArray);
-    tokens = tokenList.iterator();
-    SearchExpression se = processTokens();
+    SearchExpression se = parseInternal(tokenList);
     assertNotNull(se);
     return se;
   }
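Since parseInternal(List<SearchQueryToken>) is now the protected test seam, further cases can be driven the same way from inside SearchParserTest. A hedged sketch of one such additional case (the run(...) and prepareTokens(...) helpers are the existing ones shown above; the word1/word2 literals are an assumption based on the assertions in this hunk, and the case itself is not part of the commit):

@Test
public void simpleOr() {
  // word1 OR word2 -> a single OR node, compared via SearchExpression#toString()
  SearchExpression se = run(Token.WORD, Token.OR, Token.WORD);
  assertEquals("{'word1' OR 'word2'}", se.toString());
}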