lucene 4: extract Lucene#readSegmentInfos, and use it where applicable
This commit is contained in:
parent 0660e20c47
commit e9f8d0c722
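The commit centralizes SegmentInfos loading in two static helpers on org.elasticsearch.common.lucene.Lucene: readSegmentInfos(Directory), which fails with an IOException when the segments file cannot be read, and readSegmentInfosIfExists(Directory), which returns null instead. A minimal sketch of how a caller can use the lenient variant to recover the translog id stored in the last commit's user data (the class and method names below, and the Translog import path, are illustrative assumptions, not part of this commit):

import java.util.Map;

import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.translog.Translog;

// Illustrative caller: read the translog id stored in the last commit's user data, if any.
public class CommittedTranslogId {

    public static Long read(Directory directory) {
        // Lenient variant: returns null when no readable segments file exists yet.
        SegmentInfos sis = Lucene.readSegmentInfosIfExists(directory);
        if (sis == null) {
            return null;
        }
        Map<String, String> userData = sis.getUserData();
        String id = userData.get(Translog.TRANSLOG_ID_KEY);
        return id == null ? null : Long.parseLong(id);
    }
}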
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -40,7 +41,7 @@ import java.lang.reflect.Field;
  */
 public class Lucene {
 
-    public static final Version VERSION = Version.LUCENE_36;
+    public static final Version VERSION = Version.LUCENE_40;
     public static final Version ANALYZER_VERSION = VERSION;
     public static final Version QUERYPARSER_VERSION = VERSION;
 
@@ -57,6 +58,9 @@ public class Lucene {
         if (version == null) {
             return defaultVersion;
         }
+        if ("4.0".equals(version)) {
+            return Version.LUCENE_40;
+        }
         if ("3.6".equals(version)) {
             return Version.LUCENE_36;
         }
@@ -82,6 +86,27 @@ public class Lucene {
         return defaultVersion;
     }
 
+    /**
+     * Reads the segments infos, returning null if it doesn't exist
+     */
+    @Nullable
+    public static SegmentInfos readSegmentInfosIfExists(Directory directory) {
+        try {
+            return readSegmentInfos(directory);
+        } catch (IOException e) {
+            return null;
+        }
+    }
+
+    /**
+     * Reads the segments infos, failing if it fails to load
+     */
+    public static SegmentInfos readSegmentInfos(Directory directory) throws IOException {
+        final SegmentInfos sis = new SegmentInfos();
+        sis.read(directory);
+        return sis;
+    }
+
     public static long count(IndexSearcher searcher, Query query) throws IOException {
         TotalHitCountCollector countCollector = new TotalHitCountCollector();
         // we don't need scores, so wrap it in a constant score query
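The two helpers only differ when there is no commit point to read. A rough sketch of that difference, assuming Lucene 4.0's RAMDirectory and that reading a directory without a segments file fails with an IOException (this sketch is not part of the commit):

import java.io.IOException;

import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.lucene.Lucene;

// Rough sketch: behaviour of the two helpers on a directory that has no segments file.
public class ReadSegmentInfosSketch {

    public static void main(String[] args) {
        RAMDirectory dir = new RAMDirectory();
        // Lenient variant: no commit yet, so this is expected to return null rather than throw.
        System.out.println(Lucene.readSegmentInfosIfExists(dir));
        try {
            // Strict variant: the same situation is expected to surface as an IOException.
            Lucene.readSegmentInfos(dir);
        } catch (IOException e) {
            System.out.println("no segments file: " + e);
        }
    }
}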
@@ -23,9 +23,6 @@ import com.google.common.collect.Lists;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
 import org.apache.lucene.store.AlreadyClosedException;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -244,7 +241,7 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine {
             // commit on a just opened writer will commit even if there are no changes done to it
             // we rely on that for the commit data translog id key
             if (DirectoryReader.indexExists(store.directory())) {
-                Map<String, String> commitUserData = getCommitUserData(store.directory());
+                Map<String, String> commitUserData = Lucene.readSegmentInfos(store.directory()).getUserData();
                 if (commitUserData.containsKey(Translog.TRANSLOG_ID_KEY)) {
                     translogIdGenerator.set(Long.parseLong(commitUserData.get(Translog.TRANSLOG_ID_KEY)));
                 } else {
@@ -859,7 +856,7 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine {
             indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogId)).map());
             if (flush.force()) {
                 // if we force, we might not have committed, we need to check that its the same id
-                Map<String, String> commitUserData = getCommitUserData(store.directory());
+                Map<String, String> commitUserData = Lucene.readSegmentInfos(store.directory()).getUserData();
                 long committedTranslogId = Long.parseLong(commitUserData.get(Translog.TRANSLOG_ID_KEY));
                 if (committedTranslogId != translogId) {
                     // we did not commit anything, revert to the old translog
@@ -1529,13 +1526,4 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine {
             return searcher;
         }
     }
-
-    /**
-     * Reads the latest commit and loads the userdata
-     */
-    private static final Map<String, String> getCommitUserData(final Directory directory) throws IOException {
-        final SegmentInfos sis = new SegmentInfos();
-        sis.read(directory);
-        return sis.getUserData();
-    }
 }
@@ -22,7 +22,7 @@ package org.elasticsearch.index.gateway.blobstore;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
@@ -32,6 +32,7 @@ import org.elasticsearch.common.blobstore.*;
 import org.elasticsearch.common.io.FastByteArrayInputStream;
 import org.elasticsearch.common.io.FastByteArrayOutputStream;
 import org.elasticsearch.common.io.stream.BytesStreamInput;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
 import org.elasticsearch.common.lucene.store.ThreadSafeInputStreamIndexInput;
 import org.elasticsearch.common.settings.Settings;
@@ -609,8 +610,8 @@ public abstract class BlobStoreIndexShardGateway extends AbstractIndexShardCompo
         // read the gateway data persisted
         long version = -1;
         try {
-            if (IndexReader.indexExists(store.directory())) {
-                version = IndexReader.getCurrentVersion(store.directory());
+            if (DirectoryReader.indexExists(store.directory())) {
+                version = Lucene.readSegmentInfos(store.directory()).getVersion();
             }
         } catch (IOException e) {
             throw new IndexShardGatewayRecoveryException(shardId(), "Failed to fetch index version after copying it over", e);
@@ -20,9 +20,9 @@
 package org.elasticsearch.index.gateway.local;
 
 import com.google.common.io.Closeables;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.SegmentInfos;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
@@ -49,7 +49,6 @@ import java.io.EOFException;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.util.Map;
 import java.util.concurrent.ScheduledFuture;
 
 /**
@@ -101,12 +100,12 @@ public class LocalIndexShardGateway extends AbstractIndexShardComponent implemen
         long version = -1;
         long translogId = -1;
         try {
-            if (IndexReader.indexExists(indexShard.store().directory())) {
+            SegmentInfos si = Lucene.readSegmentInfosIfExists(indexShard.store().directory());
+            if (si != null) {
                 if (indexShouldExists) {
-                    version = IndexReader.getCurrentVersion(indexShard.store().directory());
-                    Map<String, String> commitUserData = IndexReader.getCommitUserData(indexShard.store().directory());
-                    if (commitUserData.containsKey(Translog.TRANSLOG_ID_KEY)) {
-                        translogId = Long.parseLong(commitUserData.get(Translog.TRANSLOG_ID_KEY));
+                    version = si.getVersion();
+                    if (si.getUserData().containsKey(Translog.TRANSLOG_ID_KEY)) {
+                        translogId = Long.parseLong(si.getUserData().get(Translog.TRANSLOG_ID_KEY));
                     } else {
                         translogId = version;
                     }
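The gateway call sites above previously went through IndexReader.getCurrentVersion and IndexReader.getCommitUserData; the same information is now read off the SegmentInfos returned by the new utility. A hypothetical pair of shims showing the correspondence (class and method names are illustrative, not part of this commit):

import java.io.IOException;
import java.util.Map;

import org.apache.lucene.store.Directory;
import org.elasticsearch.common.lucene.Lucene;

// Hypothetical shims mirroring the old IndexReader.getCurrentVersion / getCommitUserData calls.
public class SegmentInfosShims {

    public static long getCurrentVersion(Directory directory) throws IOException {
        // Version of the last commit point, as used for the recovered index version above.
        return Lucene.readSegmentInfos(directory).getVersion();
    }

    public static Map<String, String> getCommitUserData(Directory directory) throws IOException {
        // User data stored with the last commit, e.g. the translog id written by the engine.
        return Lucene.readSegmentInfos(directory).getUserData();
    }
}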