LUCENE-3672: reduce reliance on timestamps

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1237497 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2012-01-29 22:38:32 +00:00
parent 1aef3f9fef
commit 2e5be2f75c
32 changed files with 138 additions and 322 deletions

View File

@ -748,6 +748,10 @@ Changes in backwards compatibility policy
* LUCENE-3712: Removed unused and untested ReaderUtil#subReader methods.
  (Uwe Schindler)

* LUCENE-3672: Deprecate Directory.fileModified and
  IndexCommit.getTimestamp and .getVersion. (Andrzej Bialecki, Robert
  Muir, Mike McCandless)

Security fixes
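The LUCENE-3672 entry above is the heart of this commit: file timestamps and time-seeded version numbers go away, and an application that needs a commit time is expected to record one itself in the commit user data. A minimal sketch of that replacement pattern, assuming a "commitTime" key of the application's own choosing (the same key the reworked TestDeletionPolicy below uses):

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;

public class CommitTimeUserData {
  /** Commits, recording the wall-clock time in the commit's user data. */
  public static void commitWithTime(IndexWriter writer) throws IOException {
    Map<String,String> commitData = new HashMap<String,String>();
    commitData.put("commitTime", String.valueOf(System.currentTimeMillis()));
    writer.commit(commitData);
  }

  /** Reads the recorded time back from a commit point; 0 if it was never stamped. */
  public static long getCommitTime(IndexCommit commit) throws IOException {
    String s = commit.getUserData().get("commitTime");
    return s == null ? 0L : Long.parseLong(s);
  }
}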

View File

@ -425,7 +425,6 @@ final class DirectoryReader extends BaseMultiReader<SegmentReader> {
Collection<String> files; Collection<String> files;
Directory dir; Directory dir;
long generation; long generation;
long version;
final Map<String,String> userData; final Map<String,String> userData;
private final int segmentCount; private final int segmentCount;
@ -434,7 +433,6 @@ final class DirectoryReader extends BaseMultiReader<SegmentReader> {
this.dir = dir; this.dir = dir;
userData = infos.getUserData(); userData = infos.getUserData();
files = Collections.unmodifiableCollection(infos.files(dir, true)); files = Collections.unmodifiableCollection(infos.files(dir, true));
version = infos.getVersion();
generation = infos.getGeneration(); generation = infos.getGeneration();
segmentCount = infos.size(); segmentCount = infos.size();
} }
@ -464,11 +462,6 @@ final class DirectoryReader extends BaseMultiReader<SegmentReader> {
return dir; return dir;
} }
@Override
public long getVersion() {
return version;
}
@Override @Override
public long getGeneration() { public long getGeneration() {
return generation; return generation;

View File

@ -83,39 +83,31 @@ public abstract class IndexCommit implements Comparable<IndexCommit> {
public boolean equals(Object other) { public boolean equals(Object other) {
if (other instanceof IndexCommit) { if (other instanceof IndexCommit) {
IndexCommit otherCommit = (IndexCommit) other; IndexCommit otherCommit = (IndexCommit) other;
return otherCommit.getDirectory().equals(getDirectory()) && otherCommit.getVersion() == getVersion(); return otherCommit.getDirectory().equals(getDirectory()) && otherCommit.getGeneration() == getGeneration();
} else } else {
return false; return false;
}
} }
@Override @Override
public int hashCode() { public int hashCode() {
return (int) (getDirectory().hashCode() + getVersion()); return getDirectory().hashCode() + Long.valueOf(getGeneration()).hashCode();
} }
/** Returns the version for this IndexCommit. This is the
* same value that {@link IndexReader#getVersion} would
* return if it were opened on this commit. */
public abstract long getVersion();
/** Returns the generation (the _N in segments_N) for this /** Returns the generation (the _N in segments_N) for this
* IndexCommit */ * IndexCommit */
public abstract long getGeneration(); public abstract long getGeneration();
/** Convenience method that returns the last modified time
* of the segments_N file corresponding to this index
* commit, equivalent to
* getDirectory().fileModified(getSegmentsFileName()). */
public long getTimestamp() throws IOException {
return getDirectory().fileModified(getSegmentsFileName());
}
/** Returns userData, previously passed to {@link /** Returns userData, previously passed to {@link
* IndexWriter#commit(Map)} for this commit. Map is * IndexWriter#commit(Map)} for this commit. Map is
* String -> String. */ * String -> String. */
public abstract Map<String,String> getUserData() throws IOException; public abstract Map<String,String> getUserData() throws IOException;
public int compareTo(IndexCommit commit) { public int compareTo(IndexCommit commit) {
if (getDirectory() != commit.getDirectory()) {
throw new UnsupportedOperationException("cannot compare IndexCommits from different Directory instances");
}
long gen = getGeneration(); long gen = getGeneration();
long comgen = commit.getGeneration(); long comgen = commit.getGeneration();
if (gen < comgen) { if (gen < comgen) {
@ -126,5 +118,4 @@ public abstract class IndexCommit implements Comparable<IndexCommit> {
return 0; return 0;
} }
} }
} }
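With getVersion and getTimestamp removed, IndexCommit equality, hashing and ordering all reduce to the Directory plus the generation. A small sketch of picking the newest commit point under those semantics; it only assumes an open Directory:

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;

public class NewestCommit {
  /** Returns the commit with the highest generation (the _N in segments_N). */
  public static IndexCommit newest(Directory dir) throws IOException {
    List<IndexCommit> commits = new ArrayList<IndexCommit>(IndexReader.listCommits(dir));
    // compareTo now orders strictly by generation and refuses to compare
    // commits that come from different Directory instances.
    Collections.sort(commits);
    return commits.get(commits.size() - 1);
  }
}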

View File

@ -655,7 +655,6 @@ final class IndexFileDeleter {
boolean deleted; boolean deleted;
Directory directory; Directory directory;
Collection<CommitPoint> commitsToDelete; Collection<CommitPoint> commitsToDelete;
long version;
long generation; long generation;
final Map<String,String> userData; final Map<String,String> userData;
private final int segmentCount; private final int segmentCount;
@ -665,7 +664,6 @@ final class IndexFileDeleter {
this.commitsToDelete = commitsToDelete; this.commitsToDelete = commitsToDelete;
userData = segmentInfos.getUserData(); userData = segmentInfos.getUserData();
segmentsFileName = segmentInfos.getCurrentSegmentFileName(); segmentsFileName = segmentInfos.getCurrentSegmentFileName();
version = segmentInfos.getVersion();
generation = segmentInfos.getGeneration(); generation = segmentInfos.getGeneration();
files = Collections.unmodifiableCollection(segmentInfos.files(directory, true)); files = Collections.unmodifiableCollection(segmentInfos.files(directory, true));
segmentCount = segmentInfos.size(); segmentCount = segmentInfos.size();
@ -696,11 +694,6 @@ final class IndexFileDeleter {
return directory; return directory;
} }
@Override
public long getVersion() {
return version;
}
@Override @Override
public long getGeneration() { public long getGeneration() {
return generation; return generation;

View File

@ -468,36 +468,6 @@ public abstract class IndexReader implements Closeable {
throw new UnsupportedOperationException("This reader does not support this method."); throw new UnsupportedOperationException("This reader does not support this method.");
} }
/**
* Returns the time the index in the named directory was last modified.
* Do not use this to check whether the reader is still up-to-date, use
* {@link #isCurrent()} instead.
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static long lastModified(final Directory directory2) throws CorruptIndexException, IOException {
return ((Long) new SegmentInfos.FindSegmentsFile(directory2) {
@Override
public Object doBody(String segmentFileName) throws IOException {
return Long.valueOf(directory2.fileModified(segmentFileName));
}
}.run()).longValue();
}
/**
* Reads version number from segments files. The version number is
* initialized with a timestamp and then increased by one for each change of
* the index.
*
* @param directory where the index resides.
* @return version number.
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static long getCurrentVersion(Directory directory) throws CorruptIndexException, IOException {
return SegmentInfos.readCurrentVersion(directory);
}
/** /**
* Reads commitUserData, previously passed to {@link * Reads commitUserData, previously passed to {@link
* IndexWriter#commit(Map)}, from current index * IndexWriter#commit(Map)}, from current index
@ -525,18 +495,7 @@ public abstract class IndexReader implements Closeable {
* a reader based on a Directory), then this method * a reader based on a Directory), then this method
* returns the version recorded in the commit that the * returns the version recorded in the commit that the
* reader opened. This version is advanced every time * reader opened. This version is advanced every time
* {@link IndexWriter#commit} is called.</p> * a change is made with {@link IndexWriter}.</p>
*
* <p>If instead this reader is a near real-time reader
* (ie, obtained by a call to {@link
* IndexWriter#getReader}, or by calling {@link #openIfChanged}
* on a near real-time reader), then this method returns
* the version of the last commit done by the writer.
* Note that even as further changes are made with the
* writer, the version will not changed until a commit is
* completed. Thus, you should not rely on this method to
* determine when a near real-time reader should be
* opened. Use {@link #isCurrent} instead.</p>
* *
* @throws UnsupportedOperationException unless overridden in subclass * @throws UnsupportedOperationException unless overridden in subclass
*/ */
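The static lastModified and getCurrentVersion helpers are gone, and the getVersion javadoc no longer says anything about near-real-time readers; the supported way to detect changes is isCurrent plus openIfChanged, as the removed javadoc already advised. A hedged sketch of that check-and-reopen idiom:

import java.io.IOException;
import org.apache.lucene.index.IndexReader;

public class MaybeReopen {
  /** Returns a fresh reader if the index changed since {@code reader} was opened,
   *  otherwise returns {@code reader} unchanged. */
  public static IndexReader maybeReopen(IndexReader reader) throws IOException {
    if (reader.isCurrent()) {
      return reader;                        // up to date; no timestamp polling needed
    }
    IndexReader newReader = IndexReader.openIfChanged(reader);
    if (newReader == null) {
      return reader;                        // openIfChanged found nothing newer after all
    }
    reader.close();
    return newReader;
  }
}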

View File

@ -94,16 +94,15 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
* Whenever you add a new format, make it 1 smaller (negative version logic)! */ * Whenever you add a new format, make it 1 smaller (negative version logic)! */
public static final int FORMAT_SEGMENTS_GEN_CURRENT = -2; public static final int FORMAT_SEGMENTS_GEN_CURRENT = -2;
public int counter = 0; // used to name new segments public int counter; // used to name new segments
/** /**
* counts how often the index has been changed by adding or deleting docs. * counts how often the index has been changed
* starting with the current time in milliseconds forces to create unique version numbers.
*/ */
public long version = System.currentTimeMillis(); public long version;
private long generation = 0; // generation of the "segments_N" for the next commit private long generation; // generation of the "segments_N" for the next commit
private long lastGeneration = 0; // generation of the "segments_N" file we last successfully read private long lastGeneration; // generation of the "segments_N" file we last successfully read
// or wrote; this is normally the same as generation except if // or wrote; this is normally the same as generation except if
// there was an IOException that had interrupted a commit // there was an IOException that had interrupted a commit

View File

@ -125,11 +125,6 @@ public class SnapshotDeletionPolicy implements IndexDeletionPolicy {
return cp.getUserData(); return cp.getUserData();
} }
@Override
public long getVersion() {
return cp.getVersion();
}
@Override @Override
public boolean isDeleted() { public boolean isDeleted() {
return cp.isDeleted(); return cp.isDeleted();

View File

@ -249,14 +249,6 @@ public final class CompoundFileDirectory extends Directory {
return entries.containsKey(IndexFileNames.stripSegmentName(name)); return entries.containsKey(IndexFileNames.stripSegmentName(name));
} }
/** Returns the time the compound file was last modified. */
@Override
public long fileModified(String name) throws IOException {
ensureOpen();
return directory.fileModified(fileName);
}
/** Not implemented /** Not implemented
* @throws UnsupportedOperationException */ * @throws UnsupportedOperationException */
@Override @Override

View File

@ -62,10 +62,6 @@ public abstract class Directory implements Closeable {
public abstract boolean fileExists(String name) public abstract boolean fileExists(String name)
throws IOException; throws IOException;
/** Returns the time the named file was last modified. */
public abstract long fileModified(String name)
throws IOException;
/** Removes an existing file in the directory. */ /** Removes an existing file in the directory. */
public abstract void deleteFile(String name) public abstract void deleteFile(String name)
throws IOException; throws IOException;

View File

@ -250,14 +250,6 @@ public abstract class FSDirectory extends Directory {
return file.exists(); return file.exists();
} }
/** Returns the time the named file was last modified. */
@Override
public long fileModified(String name) {
ensureOpen();
File file = new File(directory, name);
return file.lastModified();
}
/** Returns the time the named file was last modified. */ /** Returns the time the named file was last modified. */
public static long fileModified(File directory, String name) { public static long fileModified(File directory, String name) {
File file = new File(directory, name); File file = new File(directory, name);

View File

@ -137,11 +137,6 @@ public class FileSwitchDirectory extends Directory {
return getDirectory(name).fileExists(name); return getDirectory(name).fileExists(name);
} }
@Override
public long fileModified(String name) throws IOException {
return getDirectory(name).fileModified(name);
}
@Override @Override
public void deleteFile(String name) throws IOException { public void deleteFile(String name) throws IOException {
getDirectory(name).deleteFile(name); getDirectory(name).deleteFile(name);

View File

@ -152,15 +152,6 @@ public class NRTCachingDirectory extends Directory {
return cache.fileExists(name) || delegate.fileExists(name); return cache.fileExists(name) || delegate.fileExists(name);
} }
@Override
public synchronized long fileModified(String name) throws IOException {
if (cache.fileExists(name)) {
return cache.fileModified(name);
} else {
return delegate.fileModified(name);
}
}
@Override @Override
public synchronized void deleteFile(String name) throws IOException { public synchronized void deleteFile(String name) throws IOException {
if (VERBOSE) { if (VERBOSE) {

View File

@ -98,19 +98,6 @@ public class RAMDirectory extends Directory {
return fileMap.containsKey(name); return fileMap.containsKey(name);
} }
/** Returns the time the named file was last modified.
* @throws IOException if the file does not exist
*/
@Override
public final long fileModified(String name) throws IOException {
ensureOpen();
RAMFile file = fileMap.get(name);
if (file == null) {
throw new FileNotFoundException(name);
}
return file.getLastModified();
}
/** Returns the length in bytes of a file in the directory. /** Returns the length in bytes of a file in the directory.
* @throws IOException if the file does not exist * @throws IOException if the file does not exist
*/ */

View File

@ -26,8 +26,6 @@ public class RAMFile {
RAMDirectory directory; RAMDirectory directory;
protected long sizeInBytes; protected long sizeInBytes;
private long lastModified = System.currentTimeMillis();
// File used as buffer, in no RAMDirectory // File used as buffer, in no RAMDirectory
public RAMFile() {} public RAMFile() {}
@ -44,15 +42,6 @@ public class RAMFile {
this.length = length; this.length = length;
} }
// For non-stream access from thread that might be concurrent with writing
public synchronized long getLastModified() {
return lastModified;
}
protected synchronized void setLastModified(long lastModified) {
this.lastModified = lastModified;
}
protected final byte[] addBuffer(int size) { protected final byte[] addBuffer(int size) {
byte[] buffer = newBuffer(size); byte[] buffer = newBuffer(size);
synchronized(this) { synchronized(this) {

View File

@ -167,7 +167,6 @@ public class RAMOutputStream extends IndexOutput {
@Override @Override
public void flush() throws IOException { public void flush() throws IOException {
file.setLastModified(System.currentTimeMillis());
setFileLength(); setFileLength();
} }

View File

@ -702,12 +702,6 @@ public class MockDirectoryWrapper extends Directory {
return delegate.fileExists(name); return delegate.fileExists(name);
} }
@Override
public synchronized long fileModified(String name) throws IOException {
maybeYield();
return delegate.fileModified(name);
}
@Override @Override
public synchronized long fileLength(String name) throws IOException { public synchronized long fileLength(String name) throws IOException {
maybeYield(); maybeYield();

View File

@ -224,14 +224,6 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
return realDirectory.fileLength(name); return realDirectory.fileLength(name);
} }
/**
* {@inheritDoc}
*/
@Override
public long fileModified(String name) throws IOException {
return realDirectory.fileModified(name);
}
/** /**
* {@inheritDoc} * {@inheritDoc}
*/ */

View File

@ -18,10 +18,12 @@ package org.apache.lucene.index;
*/ */
import java.io.IOException; import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.Collection;
import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
@ -32,7 +34,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
@ -47,20 +48,12 @@ public class TestDeletionPolicy extends LuceneTestCase {
final IndexCommit firstCommit = commits.get(0); final IndexCommit firstCommit = commits.get(0);
long last = SegmentInfos.generationFromSegmentsFileName(firstCommit.getSegmentsFileName()); long last = SegmentInfos.generationFromSegmentsFileName(firstCommit.getSegmentsFileName());
assertEquals(last, firstCommit.getGeneration()); assertEquals(last, firstCommit.getGeneration());
long lastVersion = firstCommit.getVersion();
long lastTimestamp = firstCommit.getTimestamp();
for(int i=1;i<commits.size();i++) { for(int i=1;i<commits.size();i++) {
final IndexCommit commit = commits.get(i); final IndexCommit commit = commits.get(i);
long now = SegmentInfos.generationFromSegmentsFileName(commit.getSegmentsFileName()); long now = SegmentInfos.generationFromSegmentsFileName(commit.getSegmentsFileName());
long nowVersion = commit.getVersion();
long nowTimestamp = commit.getTimestamp();
assertTrue("SegmentInfos commits are out-of-order", now > last); assertTrue("SegmentInfos commits are out-of-order", now > last);
assertTrue("SegmentInfos versions are out-of-order", nowVersion > lastVersion);
assertTrue("SegmentInfos timestamps are out-of-order: now=" + nowTimestamp + " vs last=" + lastTimestamp, nowTimestamp >= lastTimestamp);
assertEquals(now, commit.getGeneration()); assertEquals(now, commit.getGeneration());
last = now; last = now;
lastVersion = nowVersion;
lastTimestamp = nowTimestamp;
} }
} }
@ -158,6 +151,10 @@ public class TestDeletionPolicy extends LuceneTestCase {
} }
} }
static long getCommitTime(IndexCommit commit) throws IOException {
return Long.parseLong(commit.getUserData().get("commitTime"));
}
/* /*
* Delete a commit only when it has been obsoleted by N * Delete a commit only when it has been obsoleted by N
* seconds. * seconds.
@ -184,10 +181,10 @@ public class TestDeletionPolicy extends LuceneTestCase {
IndexCommit lastCommit = commits.get(commits.size()-1); IndexCommit lastCommit = commits.get(commits.size()-1);
// Any commit older than expireTime should be deleted: // Any commit older than expireTime should be deleted:
double expireTime = dir.fileModified(lastCommit.getSegmentsFileName())/1000.0 - expirationTimeSeconds; double expireTime = getCommitTime(lastCommit)/1000.0 - expirationTimeSeconds;
for (final IndexCommit commit : commits) { for (final IndexCommit commit : commits) {
double modTime = dir.fileModified(commit.getSegmentsFileName())/1000.0; double modTime = getCommitTime(commit)/1000.0;
if (commit != lastCommit && modTime < expireTime) { if (commit != lastCommit && modTime < expireTime) {
commit.delete(); commit.delete();
numDelete += 1; numDelete += 1;
@ -213,6 +210,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
((LogMergePolicy) mp).setUseCompoundFile(true); ((LogMergePolicy) mp).setUseCompoundFile(true);
} }
IndexWriter writer = new IndexWriter(dir, conf); IndexWriter writer = new IndexWriter(dir, conf);
Map<String,String> commitData = new HashMap<String,String>();
commitData.put("commitTime", String.valueOf(System.currentTimeMillis()));
writer.commit(commitData);
writer.close(); writer.close();
final int ITER = 9; final int ITER = 9;
@ -233,6 +233,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int j=0;j<17;j++) { for(int j=0;j<17;j++) {
addDoc(writer); addDoc(writer);
} }
commitData = new HashMap<String,String>();
commitData.put("commitTime", String.valueOf(System.currentTimeMillis()));
writer.commit(commitData);
writer.close(); writer.close();
if (i < ITER-1) { if (i < ITER-1) {
@ -269,7 +272,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
// if we are on a filesystem that seems to have only // if we are on a filesystem that seems to have only
// 1 second resolution, allow +1 second in commit // 1 second resolution, allow +1 second in commit
// age tolerance: // age tolerance:
long modTime = dir.fileModified(fileName); SegmentInfos sis = new SegmentInfos();
sis.read(dir, fileName);
long modTime = Long.parseLong(sis.getUserData().get("commitTime"));
oneSecondResolution &= (modTime % 1000) == 0; oneSecondResolution &= (modTime % 1000) == 0;
final long leeway = (long) ((SECONDS + (oneSecondResolution ? 1.0:0.0))*1000); final long leeway = (long) ((SECONDS + (oneSecondResolution ? 1.0:0.0))*1000);
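The rewritten test derives commit age from the "commitTime" value it stores in each commit's user data rather than from segments_N modification times. A sketch of a deletion policy built on the same idea, assuming every commit was stamped with that key (as in commitWithTime above):

import java.io.IOException;
import java.util.List;

import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexDeletionPolicy;

/** Deletes commits whose recorded commit time is older than maxAgeMsec,
 *  always keeping the most recent commit. */
public class ExpireByCommitTimePolicy implements IndexDeletionPolicy {
  private final long maxAgeMsec;

  public ExpireByCommitTimePolicy(long maxAgeMsec) {
    this.maxAgeMsec = maxAgeMsec;
  }

  public void onInit(List<? extends IndexCommit> commits) throws IOException {
    onCommit(commits);
  }

  public void onCommit(List<? extends IndexCommit> commits) throws IOException {
    final IndexCommit newest = commits.get(commits.size() - 1);
    final long cutoff = System.currentTimeMillis() - maxAgeMsec;
    for (IndexCommit commit : commits) {
      String s = commit.getUserData().get("commitTime");
      if (commit != newest && s != null && Long.parseLong(s) < cutoff) {
        commit.delete();
      }
    }
  }
}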

View File

@ -126,10 +126,6 @@ public class TestFieldsReader extends LuceneTestCase {
return fsDir.fileExists(name); return fsDir.fileExists(name);
} }
@Override @Override
public long fileModified(String name) throws IOException {
return fsDir.fileModified(name);
}
@Override
public void deleteFile(String name) throws IOException { public void deleteFile(String name) throws IOException {
fsDir.deleteFile(name); fsDir.deleteFile(name);
} }

View File

@ -34,12 +34,10 @@ public class TestIndexCommit extends LuceneTestCase {
IndexCommit ic1 = new IndexCommit() { IndexCommit ic1 = new IndexCommit() {
@Override public String getSegmentsFileName() { return "a"; } @Override public String getSegmentsFileName() { return "a"; }
@Override public long getVersion() { return 12; }
@Override public Directory getDirectory() { return dir; } @Override public Directory getDirectory() { return dir; }
@Override public Collection<String> getFileNames() throws IOException { return null; } @Override public Collection<String> getFileNames() throws IOException { return null; }
@Override public void delete() {} @Override public void delete() {}
@Override public long getGeneration() { return 0; } @Override public long getGeneration() { return 0; }
@Override public long getTimestamp() throws IOException { return 1;}
@Override public Map<String, String> getUserData() throws IOException { return null; } @Override public Map<String, String> getUserData() throws IOException { return null; }
@Override public boolean isDeleted() { return false; } @Override public boolean isDeleted() { return false; }
@Override public int getSegmentCount() { return 2; } @Override public int getSegmentCount() { return 2; }
@ -47,12 +45,10 @@ public class TestIndexCommit extends LuceneTestCase {
IndexCommit ic2 = new IndexCommit() { IndexCommit ic2 = new IndexCommit() {
@Override public String getSegmentsFileName() { return "b"; } @Override public String getSegmentsFileName() { return "b"; }
@Override public long getVersion() { return 12; }
@Override public Directory getDirectory() { return dir; } @Override public Directory getDirectory() { return dir; }
@Override public Collection<String> getFileNames() throws IOException { return null; } @Override public Collection<String> getFileNames() throws IOException { return null; }
@Override public void delete() {} @Override public void delete() {}
@Override public long getGeneration() { return 0; } @Override public long getGeneration() { return 0; }
@Override public long getTimestamp() throws IOException { return 1;}
@Override public Map<String, String> getUserData() throws IOException { return null; } @Override public Map<String, String> getUserData() throws IOException { return null; }
@Override public boolean isDeleted() { return false; } @Override public boolean isDeleted() { return false; }
@Override public int getSegmentCount() { return 2; } @Override public int getSegmentCount() { return 2; }

View File

@ -381,60 +381,6 @@ public class TestIndexReader extends LuceneTestCase {
_TestUtil.rmDir(dirFile); _TestUtil.rmDir(dirFile);
} }
public void testLastModified() throws Exception {
for(int i=0;i<2;i++) {
final Directory dir = newDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
assertTrue(IndexReader.indexExists(dir));
IndexReader reader = IndexReader.open(dir);
assertFalse(IndexWriter.isLocked(dir)); // reader only, no lock
long version = IndexReader.lastModified(dir);
if (i == 1) {
long version2 = IndexReader.lastModified(dir);
assertEquals(version, version2);
}
reader.close();
// modify index and check version has been
// incremented:
Thread.sleep(1000);
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir);
assertTrue("old lastModified is " + version + "; new lastModified is " + IndexReader.lastModified(dir), version <= IndexReader.lastModified(dir));
reader.close();
dir.close();
}
}
public void testVersion() throws IOException {
Directory dir = newDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
assertTrue(IndexReader.indexExists(dir));
IndexReader reader = IndexReader.open(dir);
assertFalse(IndexWriter.isLocked(dir)); // reader only, no lock
long version = IndexReader.getCurrentVersion(dir);
reader.close();
// modify index and check version has been
// incremented:
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir);
assertTrue("old version is " + version + "; new version is " + IndexReader.getCurrentVersion(dir), version < IndexReader.getCurrentVersion(dir));
reader.close();
dir.close();
}
public void testOpenReaderAfterDelete() throws IOException { public void testOpenReaderAfterDelete() throws IOException {
File dirFile = _TestUtil.getTempDir("deletetest"); File dirFile = _TestUtil.getTempDir("deletetest");
Directory dir = newFSDirectory(dirFile); Directory dir = newFSDirectory(dirFile);

View File

@ -348,12 +348,6 @@ public class TestBufferedIndexInput extends LuceneTestCase {
dir.deleteFile(name); dir.deleteFile(name);
} }
@Override @Override
public long fileModified(String name)
throws IOException
{
return dir.fileModified(name);
}
@Override
public boolean fileExists(String name) public boolean fileExists(String name)
throws IOException throws IOException
{ {

View File

@ -19,6 +19,7 @@ package org.apache.solr.core;
import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexDeletionPolicy; import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.solr.update.SolrIndexWriter;
import java.io.IOException; import java.io.IOException;
import java.util.*; import java.util.*;
@ -65,13 +66,13 @@ public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
/** /**
* Set the duration for which commit point is to be reserved by the deletion policy. * Set the duration for which commit point is to be reserved by the deletion policy.
* *
* @param indexVersion version of the commit point to be reserved * @param indexGen gen of the commit point to be reserved
* @param reserveTime time in milliseconds for which the commit point is to be reserved * @param reserveTime time in milliseconds for which the commit point is to be reserved
*/ */
public void setReserveDuration(Long indexVersion, long reserveTime) { public void setReserveDuration(Long indexGen, long reserveTime) {
long timeToSet = System.currentTimeMillis() + reserveTime; long timeToSet = System.currentTimeMillis() + reserveTime;
for(;;) { for(;;) {
Long previousTime = reserves.put(indexVersion, timeToSet); Long previousTime = reserves.put(indexGen, timeToSet);
// this is the common success case: the older time didn't exist, or // this is the common success case: the older time didn't exist, or
// came before the new time. // came before the new time.
@ -103,19 +104,19 @@ public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
/** Permanently prevent this commit point from being deleted. /** Permanently prevent this commit point from being deleted.
* A counter is used to allow a commit point to be correctly saved and released * A counter is used to allow a commit point to be correctly saved and released
* multiple times. */ * multiple times. */
public synchronized void saveCommitPoint(Long indexCommitVersion) { public synchronized void saveCommitPoint(Long indexCommitGen) {
AtomicInteger reserveCount = savedCommits.get(indexCommitVersion); AtomicInteger reserveCount = savedCommits.get(indexCommitGen);
if (reserveCount == null) reserveCount = new AtomicInteger(); if (reserveCount == null) reserveCount = new AtomicInteger();
reserveCount.incrementAndGet(); reserveCount.incrementAndGet();
savedCommits.put(indexCommitVersion, reserveCount); savedCommits.put(indexCommitGen, reserveCount);
} }
/** Release a previously saved commit point */ /** Release a previously saved commit point */
public synchronized void releaseCommitPoint(Long indexCommitVersion) { public synchronized void releaseCommitPoint(Long indexCommitGen) {
AtomicInteger reserveCount = savedCommits.get(indexCommitVersion); AtomicInteger reserveCount = savedCommits.get(indexCommitGen);
if (reserveCount == null) return;// this should not happen if (reserveCount == null) return;// this should not happen
if (reserveCount.decrementAndGet() <= 0) { if (reserveCount.decrementAndGet() <= 0) {
savedCommits.remove(indexCommitVersion); savedCommits.remove(indexCommitGen);
} }
} }
@ -165,10 +166,10 @@ public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
@Override @Override
public void delete() { public void delete() {
Long version = delegate.getVersion(); Long gen = delegate.getGeneration();
Long reserve = reserves.get(version); Long reserve = reserves.get(gen);
if (reserve != null && System.currentTimeMillis() < reserve) return; if (reserve != null && System.currentTimeMillis() < reserve) return;
if(savedCommits.containsKey(version)) return; if(savedCommits.containsKey(gen)) return;
delegate.delete(); delegate.delete();
} }
@ -187,11 +188,6 @@ public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
return delegate.hashCode(); return delegate.hashCode();
} }
@Override
public long getVersion() {
return delegate.getVersion();
}
@Override @Override
public long getGeneration() { public long getGeneration() {
return delegate.getGeneration(); return delegate.getGeneration();
@ -202,11 +198,6 @@ public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
return delegate.isDeleted(); return delegate.isDeleted();
} }
@Override
public long getTimestamp() throws IOException {
return delegate.getTimestamp();
}
@Override @Override
public Map getUserData() throws IOException { public Map getUserData() throws IOException {
return delegate.getUserData(); return delegate.getUserData();
@ -214,11 +205,11 @@ public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
} }
/** /**
* @param version the version of the commit point * @param gen the gen of the commit point
* @return a commit point corresponding to the given version * @return a commit point corresponding to the given version
*/ */
public IndexCommit getCommitPoint(Long version) { public IndexCommit getCommitPoint(Long gen) {
return solrVersionVsCommits.get(version); return solrVersionVsCommits.get(gen);
} }
/** /**
@ -236,10 +227,20 @@ public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
Map<Long, IndexCommit> map = new ConcurrentHashMap<Long, IndexCommit>(); Map<Long, IndexCommit> map = new ConcurrentHashMap<Long, IndexCommit>();
for (IndexCommitWrapper wrapper : list) { for (IndexCommitWrapper wrapper : list) {
if (!wrapper.isDeleted()) if (!wrapper.isDeleted())
map.put(wrapper.getVersion(), wrapper.delegate); map.put(wrapper.delegate.getGeneration(), wrapper.delegate);
} }
solrVersionVsCommits = map; solrVersionVsCommits = map;
latestCommit = ((list.get(list.size() - 1)).delegate); latestCommit = ((list.get(list.size() - 1)).delegate);
} }
public static long getCommitTimestamp(IndexCommit commit) throws IOException {
final Map<String,String> commitData = commit.getUserData();
String commitTime = commitData.get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY);
if (commitTime != null) {
return Long.parseLong(commitTime);
} else {
return 0;
}
}
} }
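getCommitTimestamp becomes the one place in Solr that reads the recorded commit time back out of the user data; callers that used to ask an IndexCommit for its timestamp or version now go through it and through getGeneration. A short usage sketch, assuming an open IndexReader named reader:

IndexCommit commit = reader.getIndexCommit();
long commitTimeMsec = IndexDeletionPolicyWrapper.getCommitTimestamp(commit); // 0 if never stamped
long gen = commit.getGeneration();  // commits are keyed and reserved by generation now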

View File

@ -87,7 +87,6 @@ public class SolrDeletionPolicy implements IndexDeletionPolicy, NamedListInitial
} }
sb.append(",segFN=").append(commit.getSegmentsFileName()); sb.append(",segFN=").append(commit.getSegmentsFileName());
sb.append(",version=").append(commit.getVersion());
sb.append(",generation=").append(commit.getGeneration()); sb.append(",generation=").append(commit.getGeneration());
sb.append(",filenames=").append(commit.getFileNames()); sb.append(",filenames=").append(commit.getFileNames());
} catch (Exception e) { } catch (Exception e) {
@ -133,7 +132,7 @@ public class SolrDeletionPolicy implements IndexDeletionPolicy, NamedListInitial
synchronized (this) { synchronized (this) {
long maxCommitAgeTimeStamp = -1L; long maxCommitAgeTimeStamp = -1L;
IndexCommit newest = commits.get(commits.size() - 1); IndexCommit newest = commits.get(commits.size() - 1);
log.info("newest commit = " + newest.getVersion()); log.info("newest commit = " + newest.getGeneration());
int singleSegKept = (newest.getSegmentCount() == 1) ? 1 : 0; int singleSegKept = (newest.getSegmentCount() == 1) ? 1 : 0;
int totalKept = 1; int totalKept = 1;
@ -149,7 +148,7 @@ public class SolrDeletionPolicy implements IndexDeletionPolicy, NamedListInitial
DateMathParser dmp = new DateMathParser(DateField.UTC, Locale.US); DateMathParser dmp = new DateMathParser(DateField.UTC, Locale.US);
maxCommitAgeTimeStamp = dmp.parseMath(maxCommitAge).getTime(); maxCommitAgeTimeStamp = dmp.parseMath(maxCommitAge).getTime();
} }
if (commit.getTimestamp() < maxCommitAgeTimeStamp) { if (IndexDeletionPolicyWrapper.getCommitTimestamp(commit) < maxCommitAgeTimeStamp) {
commit.delete(); commit.delete();
continue; continue;
} }
@ -191,8 +190,6 @@ public class SolrDeletionPolicy implements IndexDeletionPolicy, NamedListInitial
sb.append('/'); sb.append('/');
sb.append(commit.getGeneration()); sb.append(commit.getGeneration());
sb.append('_');
sb.append(commit.getVersion());
return sb.toString(); return sb.toString();
} }

View File

@ -60,6 +60,7 @@ import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.BinaryQueryResponseWriter; import org.apache.solr.response.BinaryQueryResponseWriter;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.SolrIndexWriter;
import org.apache.solr.util.NumberUtils; import org.apache.solr.util.NumberUtils;
import org.apache.solr.util.RefCounted; import org.apache.solr.util.RefCounted;
import org.apache.solr.util.plugin.SolrCoreAware; import org.apache.solr.util.plugin.SolrCoreAware;
@ -139,8 +140,8 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
// in a catastrophic failure, but will result in the client getting an empty file list for // in a catastrophic failure, but will result in the client getting an empty file list for
// the CMD_GET_FILE_LIST command. // the CMD_GET_FILE_LIST command.
// //
core.getDeletionPolicy().setReserveDuration(commitPoint.getVersion(), reserveCommitDuration); core.getDeletionPolicy().setReserveDuration(commitPoint.getGeneration(), reserveCommitDuration);
rsp.add(CMD_INDEX_VERSION, commitPoint.getVersion()); rsp.add(CMD_INDEX_VERSION, core.getDeletionPolicy().getCommitTimestamp(commitPoint));
rsp.add(GENERATION, commitPoint.getGeneration()); rsp.add(GENERATION, commitPoint.getGeneration());
} else { } else {
// This happens when replication is not configured to happen after startup and no commit/optimize // This happens when replication is not configured to happen after startup and no commit/optimize
@ -219,7 +220,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
for (IndexCommit c : commits.values()) { for (IndexCommit c : commits.values()) {
try { try {
NamedList<Object> nl = new NamedList<Object>(); NamedList<Object> nl = new NamedList<Object>();
nl.add("indexVersion", c.getVersion()); nl.add("indexVersion", core.getDeletionPolicy().getCommitTimestamp(c));
nl.add(GENERATION, c.getGeneration()); nl.add(GENERATION, c.getGeneration());
nl.add(CMD_GET_FILE_LIST, c.getFileNames()); nl.add(CMD_GET_FILE_LIST, c.getFileNames());
l.add(nl); l.add(nl);
@ -332,21 +333,21 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private void getFileList(SolrParams solrParams, SolrQueryResponse rsp) { private void getFileList(SolrParams solrParams, SolrQueryResponse rsp) {
String v = solrParams.get(CMD_INDEX_VERSION); String v = solrParams.get(GENERATION);
if (v == null) { if (v == null) {
rsp.add("status", "no indexversion specified"); rsp.add("status", "no index generation specified");
return; return;
} }
long version = Long.parseLong(v); long gen = Long.parseLong(v);
IndexCommit commit = core.getDeletionPolicy().getCommitPoint(version); IndexCommit commit = core.getDeletionPolicy().getCommitPoint(gen);
//System.out.println("ask for files for gen:" + commit.getGeneration() + core.getCoreDescriptor().getCoreContainer().getZkController().getNodeName()); //System.out.println("ask for files for gen:" + commit.getGeneration() + core.getCoreDescriptor().getCoreContainer().getZkController().getNodeName());
if (commit == null) { if (commit == null) {
rsp.add("status", "invalid indexversion"); rsp.add("status", "invalid index generation");
return; return;
} }
// reserve the indexcommit for sometime // reserve the indexcommit for sometime
core.getDeletionPolicy().setReserveDuration(version, reserveCommitDuration); core.getDeletionPolicy().setReserveDuration(gen, reserveCommitDuration);
List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(); List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
try { try {
//get all the files in the commit //get all the files in the commit
@ -359,10 +360,10 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
result.add(fileMeta); result.add(fileMeta);
} }
} catch (IOException e) { } catch (IOException e) {
rsp.add("status", "unable to get file names for given indexversion"); rsp.add("status", "unable to get file names for given index generation");
rsp.add("exception", e); rsp.add("exception", e);
LOG.warn("Unable to get file names for indexCommit version: " LOG.warn("Unable to get file names for indexCommit generation: "
+ version, e); + gen, e);
} }
rsp.add(CMD_GET_FILE_LIST, result); rsp.add(CMD_GET_FILE_LIST, result);
if (confFileNameAlias.size() < 1) if (confFileNameAlias.size() < 1)
@ -487,8 +488,13 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
long version[] = new long[2]; long version[] = new long[2];
RefCounted<SolrIndexSearcher> searcher = core.getSearcher(); RefCounted<SolrIndexSearcher> searcher = core.getSearcher();
try { try {
version[0] = searcher.get().getIndexReader().getIndexCommit().getVersion(); final IndexCommit commit = searcher.get().getIndexReader().getIndexCommit();
version[1] = searcher.get().getIndexReader().getIndexCommit().getGeneration(); final Map<String,String> commitData = commit.getUserData();
String commitTime = commitData.get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY);
if (commitTime != null) {
version[0] = Long.parseLong(commitTime);
}
version[1] = commit.getGeneration();
} catch (IOException e) { } catch (IOException e) {
LOG.warn("Unable to get index version : ", e); LOG.warn("Unable to get index version : ", e);
} finally { } finally {
@ -574,7 +580,6 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
} }
if (isMaster && commit != null) { if (isMaster && commit != null) {
master.add("replicatableIndexVersion", commit.getVersion());
master.add("replicatableGeneration", commit.getGeneration()); master.add("replicatableGeneration", commit.getGeneration());
} }
@ -846,7 +851,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
Collection<IndexCommit> commits = IndexReader.listCommits(reader.directory()); Collection<IndexCommit> commits = IndexReader.listCommits(reader.directory());
for (IndexCommit ic : commits) { for (IndexCommit ic : commits) {
if(ic.getSegmentCount() == 1){ if(ic.getSegmentCount() == 1){
if(indexCommitPoint == null || indexCommitPoint.getVersion() < ic.getVersion()) indexCommitPoint = ic; if(indexCommitPoint == null || indexCommitPoint.getGeneration() < ic.getGeneration()) indexCommitPoint = ic;
} }
} }
} else{ } else{
@ -857,7 +862,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
// always saves the last commit point (and the last optimized commit point, if needed) // always saves the last commit point (and the last optimized commit point, if needed)
/*** /***
if(indexCommitPoint != null){ if(indexCommitPoint != null){
core.getDeletionPolicy().saveCommitPoint(indexCommitPoint.getVersion()); core.getDeletionPolicy().saveCommitPoint(indexCommitPoint.getGeneration());
} }
***/ ***/
} }
@ -958,10 +963,10 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
// always saves the last commit point (and the last optimized commit point, if needed) // always saves the last commit point (and the last optimized commit point, if needed)
/*** /***
if (indexCommitPoint != null) { if (indexCommitPoint != null) {
core.getDeletionPolicy().saveCommitPoint(indexCommitPoint.getVersion()); core.getDeletionPolicy().saveCommitPoint(indexCommitPoint.getGeneration());
} }
if(oldCommitPoint != null){ if(oldCommitPoint != null){
core.getDeletionPolicy().releaseCommitPoint(oldCommitPoint.getVersion()); core.getDeletionPolicy().releaseCommitPoint(oldCommitPoint.getGeneration());
} }
***/ ***/
} }
@ -989,7 +994,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
private FastOutputStream fos; private FastOutputStream fos;
private Long indexVersion; private Long indexGen;
private IndexDeletionPolicyWrapper delPolicy; private IndexDeletionPolicyWrapper delPolicy;
public FileStream(SolrParams solrParams) { public FileStream(SolrParams solrParams) {
@ -1004,8 +1009,8 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
String sLen = params.get(LEN); String sLen = params.get(LEN);
String compress = params.get(COMPRESSION); String compress = params.get(COMPRESSION);
String sChecksum = params.get(CHECKSUM); String sChecksum = params.get(CHECKSUM);
String sindexVersion = params.get(CMD_INDEX_VERSION); String sGen = params.get(GENERATION);
if (sindexVersion != null) indexVersion = Long.parseLong(sindexVersion); if (sGen != null) indexGen = Long.parseLong(sGen);
if (Boolean.parseBoolean(compress)) { if (Boolean.parseBoolean(compress)) {
fos = new FastOutputStream(new DeflaterOutputStream(out)); fos = new FastOutputStream(new DeflaterOutputStream(out));
} else { } else {
@ -1063,9 +1068,9 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
} }
fos.write(buf, 0, (int) bytesRead); fos.write(buf, 0, (int) bytesRead);
fos.flush(); fos.flush();
if (indexVersion != null && (packetsWritten % 5 == 0)) { if (indexGen != null && (packetsWritten % 5 == 0)) {
//after every 5 packets reserve the commitpoint for some time //after every 5 packets reserve the commitpoint for some time
delPolicy.setReserveDuration(indexVersion, reserveCommitDuration); delPolicy.setReserveDuration(indexGen, reserveCommitDuration);
} }
packetsWritten++; packetsWritten++;
} }
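Across the replication handler the commit generation replaces the index version as the identifier exchanged with slaves, while the reported indexVersion becomes the commit timestamp read from user data. A sketch of the master-side lookup under the new scheme; core, reserveCommitDuration and GENERATION are the handler's existing fields and constants:

long gen = Long.parseLong(solrParams.get(GENERATION));
IndexCommit commit = core.getDeletionPolicy().getCommitPoint(gen);       // keyed by generation
if (commit != null) {
  // keep the commit point alive while the slave fetches its files
  core.getDeletionPolicy().setReserveDuration(gen, reserveCommitDuration);
}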

View File

@ -28,6 +28,7 @@ import org.apache.solr.common.util.FileUtils;
import org.apache.solr.common.util.JavaBinCodec; import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrCore;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
import static org.apache.solr.handler.ReplicationHandler.*; import static org.apache.solr.handler.ReplicationHandler.*;
import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.LocalSolrQueryRequest;
@ -210,10 +211,10 @@ public class SnapPuller {
/** /**
* Fetches the list of files in a given index commit point * Fetches the list of files in a given index commit point
*/ */
void fetchFileList(long version) throws IOException { void fetchFileList(long gen) throws IOException {
PostMethod post = new PostMethod(masterUrl); PostMethod post = new PostMethod(masterUrl);
post.addParameter(COMMAND, CMD_GET_FILE_LIST); post.addParameter(COMMAND, CMD_GET_FILE_LIST);
post.addParameter(CMD_INDEX_VERSION, String.valueOf(version)); post.addParameter(GENERATION, String.valueOf(gen));
post.addParameter("wt", "javabin"); post.addParameter("wt", "javabin");
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@ -225,7 +226,7 @@ public class SnapPuller {
filesToDownload = Collections.synchronizedList(f); filesToDownload = Collections.synchronizedList(f);
else { else {
filesToDownload = Collections.emptyList(); filesToDownload = Collections.emptyList();
LOG.error("No files to download for indexversion: "+ version); LOG.error("No files to download for index generation: "+ gen);
} }
f = nl.get(CONF_FILES); f = nl.get(CONF_FILES);
@ -274,7 +275,7 @@ public class SnapPuller {
} }
if (latestVersion == 0L) { if (latestVersion == 0L) {
if (force && commit.getVersion() != 0) { if (force && commit.getGeneration() != 0) {
// since we won't get the files for an empty index, // since we won't get the files for an empty index,
// we just clear ours and commit // we just clear ours and commit
core.getUpdateHandler().getSolrCoreState().getIndexWriter(core).deleteAll(); core.getUpdateHandler().getSolrCoreState().getIndexWriter(core).deleteAll();
@ -288,17 +289,17 @@ public class SnapPuller {
return true; return true;
} }
if (!force && commit.getVersion() == latestVersion && commit.getGeneration() == latestGeneration) { if (!force && IndexDeletionPolicyWrapper.getCommitTimestamp(commit) == latestVersion) {
//master and slave are already in sync just return //master and slave are already in sync just return
LOG.info("Slave in sync with master."); LOG.info("Slave in sync with master.");
successfulInstall = true; successfulInstall = true;
return true; return true;
} }
LOG.info("Master's version: " + latestVersion + ", generation: " + latestGeneration); LOG.info("Master's generation: " + latestGeneration);
LOG.info("Slave's version: " + commit.getVersion() + ", generation: " + commit.getGeneration()); LOG.info("Slave's generation: " + commit.getGeneration());
LOG.info("Starting replication process"); LOG.info("Starting replication process");
// get the list of files first // get the list of files first
fetchFileList(latestVersion); fetchFileList(latestGeneration);
// this can happen if the commit point is deleted before we fetch the file list. // this can happen if the commit point is deleted before we fetch the file list.
if(filesToDownload.isEmpty()) return false; if(filesToDownload.isEmpty()) return false;
LOG.info("Number of files in latest index in master: " + filesToDownload.size()); LOG.info("Number of files in latest index in master: " + filesToDownload.size());
@ -309,7 +310,7 @@ public class SnapPuller {
filesDownloaded = Collections.synchronizedList(new ArrayList<Map<String, Object>>()); filesDownloaded = Collections.synchronizedList(new ArrayList<Map<String, Object>>());
// if the generateion of master is older than that of the slave , it means they are not compatible to be copied // if the generateion of master is older than that of the slave , it means they are not compatible to be copied
// then a new index direcory to be created and all the files need to be copied // then a new index direcory to be created and all the files need to be copied
boolean isFullCopyNeeded = commit.getVersion() >= latestVersion || force; boolean isFullCopyNeeded = IndexDeletionPolicyWrapper.getCommitTimestamp(commit) >= latestVersion || force;
File tmpIndexDir = createTempindexDir(core); File tmpIndexDir = createTempindexDir(core);
if (isIndexStale()) if (isIndexStale())
isFullCopyNeeded = true; isFullCopyNeeded = true;
@ -318,11 +319,11 @@ public class SnapPuller {
File indexDir = null ; File indexDir = null ;
try { try {
indexDir = new File(core.getIndexDir()); indexDir = new File(core.getIndexDir());
downloadIndexFiles(isFullCopyNeeded, tmpIndexDir, latestVersion); downloadIndexFiles(isFullCopyNeeded, tmpIndexDir, latestGeneration);
LOG.info("Total time taken for download : " + ((System.currentTimeMillis() - replicationStartTime) / 1000) + " secs"); LOG.info("Total time taken for download : " + ((System.currentTimeMillis() - replicationStartTime) / 1000) + " secs");
Collection<Map<String, Object>> modifiedConfFiles = getModifiedConfFiles(confFilesToDownload); Collection<Map<String, Object>> modifiedConfFiles = getModifiedConfFiles(confFilesToDownload);
if (!modifiedConfFiles.isEmpty()) { if (!modifiedConfFiles.isEmpty()) {
downloadConfFiles(confFilesToDownload, latestVersion); downloadConfFiles(confFilesToDownload, latestGeneration);
if (isFullCopyNeeded) { if (isFullCopyNeeded) {
successfulInstall = modifyIndexProps(tmpIndexDir.getName()); successfulInstall = modifyIndexProps(tmpIndexDir.getName());
deleteTmpIdxDir = false; deleteTmpIdxDir = false;
@ -530,7 +531,7 @@ public class SnapPuller {
}.start(); }.start();
} }
private void downloadConfFiles(List<Map<String, Object>> confFilesToDownload, long latestVersion) throws Exception { private void downloadConfFiles(List<Map<String, Object>> confFilesToDownload, long latestGeneration) throws Exception {
LOG.info("Starting download of configuration files from master: " + confFilesToDownload); LOG.info("Starting download of configuration files from master: " + confFilesToDownload);
confFilesDownloaded = Collections.synchronizedList(new ArrayList<Map<String, Object>>()); confFilesDownloaded = Collections.synchronizedList(new ArrayList<Map<String, Object>>());
File tmpconfDir = new File(solrCore.getResourceLoader().getConfigDir(), "conf." + getDateAsStr(new Date())); File tmpconfDir = new File(solrCore.getResourceLoader().getConfigDir(), "conf." + getDateAsStr(new Date()));
@ -542,7 +543,7 @@ public class SnapPuller {
} }
for (Map<String, Object> file : confFilesToDownload) { for (Map<String, Object> file : confFilesToDownload) {
String saveAs = (String) (file.get(ALIAS) == null ? file.get(NAME) : file.get(ALIAS)); String saveAs = (String) (file.get(ALIAS) == null ? file.get(NAME) : file.get(ALIAS));
fileFetcher = new FileFetcher(tmpconfDir, file, saveAs, true, latestVersion); fileFetcher = new FileFetcher(tmpconfDir, file, saveAs, true, latestGeneration);
currentFile = file; currentFile = file;
fileFetcher.fetchFile(); fileFetcher.fetchFile();
confFilesDownloaded.add(new HashMap<String, Object>(file)); confFilesDownloaded.add(new HashMap<String, Object>(file));
@ -561,13 +562,13 @@ public class SnapPuller {
* *
* @param downloadCompleteIndex is it a fresh index copy * @param downloadCompleteIndex is it a fresh index copy
* @param tmpIdxDir the directory to which files need to be downloadeed to * @param tmpIdxDir the directory to which files need to be downloadeed to
* @param latestVersion the version number * @param latestGeneration the version number
*/ */
private void downloadIndexFiles(boolean downloadCompleteIndex, File tmpIdxDir, long latestVersion) throws Exception { private void downloadIndexFiles(boolean downloadCompleteIndex, File tmpIdxDir, long latestGeneration) throws Exception {
for (Map<String, Object> file : filesToDownload) { for (Map<String, Object> file : filesToDownload) {
File localIndexFile = new File(solrCore.getIndexDir(), (String) file.get(NAME)); File localIndexFile = new File(solrCore.getIndexDir(), (String) file.get(NAME));
if (!localIndexFile.exists() || downloadCompleteIndex) { if (!localIndexFile.exists() || downloadCompleteIndex) {
fileFetcher = new FileFetcher(tmpIdxDir, file, (String) file.get(NAME), false, latestVersion); fileFetcher = new FileFetcher(tmpIdxDir, file, (String) file.get(NAME), false, latestGeneration);
currentFile = file; currentFile = file;
fileFetcher.fetchFile(); fileFetcher.fetchFile();
filesDownloaded.add(new HashMap<String, Object>(file)); filesDownloaded.add(new HashMap<String, Object>(file));
@ -896,10 +897,10 @@ public class SnapPuller {
private boolean aborted = false; private boolean aborted = false;
private Long indexVersion; private Long indexGen;
FileFetcher(File dir, Map<String, Object> fileDetails, String saveAs, FileFetcher(File dir, Map<String, Object> fileDetails, String saveAs,
boolean isConf, long latestVersion) throws IOException { boolean isConf, long latestGen) throws IOException {
this.copy2Dir = dir; this.copy2Dir = dir;
this.fileName = (String) fileDetails.get(NAME); this.fileName = (String) fileDetails.get(NAME);
this.size = (Long) fileDetails.get(SIZE); this.size = (Long) fileDetails.get(SIZE);
@ -908,7 +909,7 @@ public class SnapPuller {
if(fileDetails.get(LAST_MODIFIED) != null){ if(fileDetails.get(LAST_MODIFIED) != null){
lastmodified = (Long)fileDetails.get(LAST_MODIFIED); lastmodified = (Long)fileDetails.get(LAST_MODIFIED);
} }
indexVersion = latestVersion; indexGen = latestGen;
this.file = new File(copy2Dir, saveAs); this.file = new File(copy2Dir, saveAs);
@ -1077,7 +1078,7 @@ public class SnapPuller {
//the method is command=filecontent //the method is command=filecontent
post.addParameter(COMMAND, CMD_GET_FILE); post.addParameter(COMMAND, CMD_GET_FILE);
//add the version to download. This is used to reserve the download //add the version to download. This is used to reserve the download
post.addParameter(CMD_INDEX_VERSION, indexVersion.toString()); post.addParameter(GENERATION, indexGen.toString());
if (isConf) { if (isConf) {
//set cf instead of file for config file //set cf instead of file for config file
post.addParameter(CONF_FILE_SHORT, fileName); post.addParameter(CONF_FILE_SHORT, fileName);

View File

@ -70,7 +70,7 @@ public class SnapShooter {
} }
void createSnapAsync(final IndexCommit indexCommit, final int numberToKeep, final ReplicationHandler replicationHandler) { void createSnapAsync(final IndexCommit indexCommit, final int numberToKeep, final ReplicationHandler replicationHandler) {
replicationHandler.core.getDeletionPolicy().saveCommitPoint(indexCommit.getVersion()); replicationHandler.core.getDeletionPolicy().saveCommitPoint(indexCommit.getGeneration());
new Thread() { new Thread() {
@Override @Override
@ -112,7 +112,7 @@ public class SnapShooter {
LOG.error("Exception while creating snapshot", e); LOG.error("Exception while creating snapshot", e);
details.add("snapShootException", e.getMessage()); details.add("snapShootException", e.getMessage());
} finally { } finally {
replicationHandler.core.getDeletionPolicy().releaseCommitPoint(indexCommit.getVersion()); replicationHandler.core.getDeletionPolicy().releaseCommitPoint(indexCommit.getGeneration());
replicationHandler.snapShootDetails = details; replicationHandler.snapShootDetails = details;
if (lock != null) { if (lock != null) {
try { try {

View File

@ -48,6 +48,7 @@ import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.FieldType; import org.apache.solr.schema.FieldType;
import org.apache.solr.update.SolrIndexWriter;
import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField; import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.SolrIndexSearcher;
@ -562,7 +563,10 @@ public class LukeRequestHandler extends RequestHandlerBase
indexInfo.add("current", reader.isCurrent() ); indexInfo.add("current", reader.isCurrent() );
indexInfo.add("hasDeletions", reader.hasDeletions() ); indexInfo.add("hasDeletions", reader.hasDeletions() );
indexInfo.add("directory", dir ); indexInfo.add("directory", dir );
indexInfo.add("lastModified", new Date(IndexReader.lastModified(dir)) ); String s = reader.getIndexCommit().getUserData().get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY);
if (s != null) {
indexInfo.add("lastModified", new Date(Long.parseLong(s)));
}
return indexInfo; return indexInfo;
} }
//////////////////////// SolrInfoMBeans methods ////////////////////// //////////////////////// SolrInfoMBeans methods //////////////////////

View File

@ -29,6 +29,7 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrConfig; import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrConfig.HttpCachingConfig.LastModFrom; import org.apache.solr.core.SolrConfig.HttpCachingConfig.LastModFrom;
@ -157,7 +158,7 @@ public final class HttpCacheHeaderUtil {
// assume default, change if needed (getOpenTime() should be fast) // assume default, change if needed (getOpenTime() should be fast)
lastMod = lastMod =
LastModFrom.DIRLASTMOD == lastModFrom LastModFrom.DIRLASTMOD == lastModFrom
? IndexReader.lastModified(searcher.getIndexReader().directory()) ? IndexDeletionPolicyWrapper.getCommitTimestamp(searcher.getIndexReader().getIndexCommit())
: searcher.getOpenTime(); : searcher.getOpenTime();
} catch (IOException e) { } catch (IOException e) {
// we're pretty freaking screwed if this happens // we're pretty freaking screwed if this happens

View File

@ -22,6 +22,8 @@ package org.apache.solr.update;
import java.io.IOException; import java.io.IOException;
import java.net.URL; import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
@ -387,7 +389,9 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
} }
// SolrCore.verbose("writer.commit() start writer=",writer); // SolrCore.verbose("writer.commit() start writer=",writer);
writer.commit(); final Map<String,String> commitData = new HashMap<String,String>();
commitData.put(SolrIndexWriter.COMMIT_TIME_MSEC_KEY, String.valueOf(System.currentTimeMillis()));
writer.commit(commitData);
// SolrCore.verbose("writer.commit() end"); // SolrCore.verbose("writer.commit() end");
numDocsPending.set(0); numDocsPending.set(0);
callPostCommitCallbacks(); callPostCommitCallbacks();

View File

@ -50,6 +50,11 @@ public class SolrIndexWriter extends IndexWriter {
public static final AtomicLong numOpens = new AtomicLong(); public static final AtomicLong numOpens = new AtomicLong();
public static final AtomicLong numCloses = new AtomicLong(); public static final AtomicLong numCloses = new AtomicLong();
/** Stored into each Lucene commit to record the
* System.currentTimeMillis() when commit was called. */
public static final String COMMIT_TIME_MSEC_KEY = "commitTimeMSec";
String name; String name;
private DirectoryFactory directoryFactory; private DirectoryFactory directoryFactory;

View File

@ -92,10 +92,10 @@ public class TestSolrDeletionPolicy1 extends SolrTestCaseJ4 {
addDocs(); addDocs();
Map<Long, IndexCommit> commits = delPolicy.getCommits(); Map<Long, IndexCommit> commits = delPolicy.getCommits();
IndexCommit latest = delPolicy.getLatestCommit(); IndexCommit latest = delPolicy.getLatestCommit();
for (Long version : commits.keySet()) { for (Long gen : commits.keySet()) {
if (commits.get(version) == latest) if (commits.get(gen) == latest)
continue; continue;
assertEquals(1, commits.get(version).getSegmentCount()); assertEquals(1, commits.get(gen).getSegmentCount());
} }
} }
@ -126,7 +126,7 @@ public class TestSolrDeletionPolicy1 extends SolrTestCaseJ4 {
); );
commits = delPolicy.getCommits(); commits = delPolicy.getCommits();
assertTrue(!commits.containsKey(ic.getVersion())); assertTrue(!commits.containsKey(ic.getGeneration()));
} }
} }