upgrade to lucene 5 snapshot

Robert Muir 2014-11-05 17:44:21 -05:00
parent 1247d55e06
commit 684e91b701
3 changed files with 14 additions and 9 deletions
pom.xml

@@ -33,7 +33,8 @@
 <properties>
 <elasticsearch.version>2.0.0-SNAPSHOT</elasticsearch.version>
-<lucene.version>4.10.2</lucene.version>
+<lucene.version>5.0.0</lucene.version>
+<lucene.maven.version>5.0.0-snapshot-1636426</lucene.maven.version>
 <tests.jvms>1</tests.jvms>
 <tests.shuffle>true</tests.shuffle>
 <tests.output>onerror</tests.output>
@@ -46,6 +47,10 @@
 <id>sonatype</id>
 <url>http://oss.sonatype.org/content/repositories/releases/</url>
 </repository>
+<repository>
+<id>Lucene snapshots</id>
+<url>https://download.elasticsearch.org/lucenesnapshots/maven/</url>
+</repository>
 </repositories>
 <dependencies>
@@ -64,7 +69,7 @@
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-test-framework</artifactId>
-<version>${lucene.version}</version>
+<version>${lucene.maven.version}</version>
 <scope>test</scope>
 </dependency>
@@ -78,7 +83,7 @@
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-analyzers-smartcn</artifactId>
-<version>${lucene.version}</version>
+<version>${lucene.maven.version}</version>
 <scope>compile</scope>
 </dependency>

SmartChineseTokenizerTokenizerFactory.java

@@ -37,7 +37,7 @@ public class SmartChineseTokenizerTokenizerFactory extends AbstractTokenizerFactory
 }
 @Override
-public Tokenizer create(Reader reader) {
-return new HMMChineseTokenizer(reader);
+public Tokenizer create() {
+return new HMMChineseTokenizer();
 }
 }
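
The signature change above tracks the Lucene 5 Tokenizer API: the input Reader is no longer passed to the Tokenizer constructor but attached afterwards via Tokenizer#setReader. A minimal sketch of driving the tokenizer under that API (the example class name and sample text are illustrative only, not part of this commit):

// Illustrative only: consuming a Lucene 5 Tokenizer once a factory's create() returns it.
import java.io.StringReader;

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.cn.smart.HMMChineseTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class HMMChineseTokenizerExample {
    public static void main(String[] args) throws Exception {
        Tokenizer tokenizer = new HMMChineseTokenizer();              // no Reader in the constructor anymore
        tokenizer.setReader(new StringReader("我购买了道具和服装"));     // input is supplied here instead
        CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
        tokenizer.reset();
        while (tokenizer.incrementToken()) {
            System.out.println(term.toString());
        }
        tokenizer.end();
        tokenizer.close();
    }
}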

SmartChineseIndicesAnalysis.java

@@ -52,8 +52,8 @@ public class SmartChineseIndicesAnalysis extends AbstractComponent {
 }
 @Override
-public Tokenizer create(Reader reader) {
-return new HMMChineseTokenizer(reader);
+public Tokenizer create() {
+return new HMMChineseTokenizer();
 }
 }));
@@ -65,8 +65,8 @@ public class SmartChineseIndicesAnalysis extends AbstractComponent {
 }
 @Override
-public Tokenizer create(Reader reader) {
-return new HMMChineseTokenizer(reader);
+public Tokenizer create() {
+return new HMMChineseTokenizer();
 }
 }));
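
Both anonymous factories above now build the tokenizer without a Reader; under Lucene 5 the Analyzer supplies it per field. A minimal sketch (the analyzer class below is hypothetical and not part of this commit) of wiring HMMChineseTokenizer into the Lucene 5 Analyzer API, whose createComponents() likewise lost its Reader parameter:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.cn.smart.HMMChineseTokenizer;

// Hypothetical example class, not part of this commit.
public class SimpleHMMChineseAnalyzer extends Analyzer {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
        // The analyzer attaches the per-field Reader itself; this method only builds the chain.
        Tokenizer source = new HMMChineseTokenizer();
        return new TokenStreamComponents(source);
    }
}

Calling tokenStream("field", "text") on such an analyzer would then set the Reader on the tokenizer internally, which is why neither the factories nor the tokenizer constructor need one any longer.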