mirror of https://github.com/apache/lucene.git
SOLR-2654: Directories used by a SolrCore are now closed when they are no longer used.
SOLR-2654: The same Directory instance is now always used across a SolrCore so that it's easier to add other DirectoryFactory implementations without static caching hacks. git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1159378 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 277c674be0
commit 7354f8d9e8
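For orientation, here is a minimal usage sketch (not part of the commit) of the Directory lifecycle this change introduces: callers obtain a Directory from a factory and must release it, and the factory ref-counts one instance per path. The class and method names follow the patch below; the concrete factory choice, the path, and the "simple" lock type are illustrative assumptions.

```java
// Hypothetical sketch of the ref-counted DirectoryFactory API added in this commit.
import org.apache.lucene.store.Directory;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.StandardDirectoryFactory;

public class DirectoryLifecycleSketch {
  public static void main(String[] args) throws Exception {
    DirectoryFactory factory = new StandardDirectoryFactory();
    // The same path yields the same cached Directory instance ("simple" lock type assumed).
    Directory dir = factory.get("/tmp/solr-index", "simple");
    try {
      // ... open an IndexWriter or IndexReader against dir ...
    } finally {
      // Every get()/incRef() must be balanced by a release().
      factory.release(dir);
    }
    // Closing the factory closes any Directory instances it still caches.
    factory.close();
  }
}
```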
@@ -1180,8 +1180,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {

  /** Returns the Directory used by this index. */
  public Directory getDirectory() {
    // Pass false because the flush during closing calls getDirectory
    ensureOpen(false);
    return directory;
  }
@@ -229,6 +229,9 @@ Bug Fixes

* SOLR-2682: Remove addException() in SimpleFacet. FacetComponent no longer catches and embeds
  exceptions that occur during facet processing; it throws HTTP 400 or 500 exceptions instead. (koji)

* SOLR-2654: Directories used by a SolrCore are now closed when they are no longer used.
  (Mark Miller)

Other Changes
----------------------

@@ -293,7 +296,11 @@ Other Changes

* SOLR-2698: Enhance CoreAdmin STATUS command to return index size.
  (Yury Kats, hossman, Mark Miller)

* SOLR-2654: The same Directory instance is now always used across a SolrCore so that
  it's easier to add other DirectoryFactory implementations without static caching hacks.
  (Mark Miller)

Documentation
----------------------
@@ -0,0 +1,221 @@
package org.apache.solr.core;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.SimpleFSLockFactory;
import org.apache.lucene.store.SingleInstanceLockFactory;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link DirectoryFactory} impl base class for caching Directory instances
 * per path. Most DirectoryFactory implementations will want to extend this
 * class and simply implement {@link DirectoryFactory#create(String)}.
 */
public abstract class CachingDirectoryFactory extends DirectoryFactory {

  class CacheValue {
    Directory directory;
    int refCnt = 1;
    public String path;
    public boolean doneWithDir = false;
  }

  private static Logger log = LoggerFactory
      .getLogger(CachingDirectoryFactory.class);

  protected Map<String,CacheValue> byPathCache = new HashMap<String,CacheValue>();

  protected Map<Directory,CacheValue> byDirectoryCache = new HashMap<Directory,CacheValue>();

  /*
   * (non-Javadoc)
   *
   * @see org.apache.solr.core.DirectoryFactory#close()
   */
  @Override
  public void close() throws IOException {
    synchronized (this) {
      for (CacheValue val : byDirectoryCache.values()) {
        val.directory.close();
      }
      byDirectoryCache.clear();
      byPathCache.clear();
    }
  }

  private void close(Directory directory) throws IOException {
    synchronized (this) {
      CacheValue cacheValue = byDirectoryCache.get(directory);
      if (cacheValue == null) {
        throw new IllegalArgumentException("Unknown directory: " + directory
            + " " + byDirectoryCache);
      }
      cacheValue.refCnt--;
      if (cacheValue.refCnt == 0 && cacheValue.doneWithDir) {
        directory.close();
        byDirectoryCache.remove(directory);
        byPathCache.remove(cacheValue.path);
      }
    }
  }

  protected abstract Directory create(String path) throws IOException;

  @Override
  public boolean exists(String path) {
    // back compat behavior
    File dirFile = new File(path);
    return dirFile.canRead() && dirFile.list().length > 0;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.solr.core.DirectoryFactory#get(java.lang.String,
   * java.lang.String)
   */
  @Override
  public final Directory get(String path, String rawLockType)
      throws IOException {
    return get(path, rawLockType, false);
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.solr.core.DirectoryFactory#get(java.lang.String,
   * java.lang.String, boolean)
   */
  @Override
  public final Directory get(String path, String rawLockType, boolean forceNew)
      throws IOException {
    String fullPath = new File(path).getAbsolutePath();
    synchronized (this) {
      CacheValue cacheValue = byPathCache.get(fullPath);
      Directory directory = null;
      if (cacheValue != null) {
        directory = cacheValue.directory;
        if (forceNew) {
          cacheValue.doneWithDir = true;
          if (cacheValue.refCnt == 0) {
            close(cacheValue.directory);
          }
        }
      }

      if (directory == null || forceNew) {
        directory = create(fullPath);

        CacheValue newCacheValue = new CacheValue();
        newCacheValue.directory = directory;
        newCacheValue.path = fullPath;

        injectLockFactory(directory, path, rawLockType);

        byDirectoryCache.put(directory, newCacheValue);
        byPathCache.put(fullPath, newCacheValue);
      } else {
        cacheValue.refCnt++;
      }

      return directory;
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.solr.core.DirectoryFactory#incRef(org.apache.lucene.store.Directory)
   */
  public void incRef(Directory directory) {
    synchronized (this) {
      CacheValue cacheValue = byDirectoryCache.get(directory);
      if (cacheValue == null) {
        throw new IllegalArgumentException("Unknown directory: " + directory);
      }

      cacheValue.refCnt++;
    }
  }

  public void init(NamedList args) {}

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.solr.core.DirectoryFactory#release(org.apache.lucene.store.Directory)
   */
  @Override
  public void release(Directory directory) throws IOException {
    if (directory == null) {
      throw new NullPointerException();
    }
    close(directory);
  }

  /**
   * @param dir
   * @param lockPath
   * @param rawLockType
   * @return
   * @throws IOException
   */
  private static Directory injectLockFactory(Directory dir, String lockPath,
      String rawLockType) throws IOException {
    if (null == rawLockType) {
      // we default to "simple" for backwards compatibility
      log.warn("No lockType configured for " + dir + " assuming 'simple'");
      rawLockType = "simple";
    }
    final String lockType = rawLockType.toLowerCase(Locale.ENGLISH).trim();

    if ("simple".equals(lockType)) {
      // multiple SimpleFSLockFactory instances should be OK
      dir.setLockFactory(new SimpleFSLockFactory(lockPath));
    } else if ("native".equals(lockType)) {
      dir.setLockFactory(new NativeFSLockFactory(lockPath));
    } else if ("single".equals(lockType)) {
      if (!(dir.getLockFactory() instanceof SingleInstanceLockFactory)) dir
          .setLockFactory(new SingleInstanceLockFactory());
    } else if ("none".equals(lockType)) {
      // Recipe for disaster
      log.error("CONFIGURATION WARNING: locks are disabled on " + dir);
      dir.setLockFactory(NoLockFactory.getNoLockFactory());
    } else {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Unrecognized lockType: " + rawLockType);
    }
    return dir;
  }
}
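To make the caching contract above concrete, here is a small illustrative snippet (not part of the patch) of the per-path caching and reference counting. RAMDirectoryFactory is used only because it needs no real filesystem; the path and the "single" lock type are assumptions for the example.

```java
// Illustration of CachingDirectoryFactory semantics, assumed from the code above.
import org.apache.lucene.store.Directory;
import org.apache.solr.core.RAMDirectoryFactory;

public class CachingSemanticsSketch {
  public static void main(String[] args) throws Exception {
    RAMDirectoryFactory factory = new RAMDirectoryFactory();

    Directory d1 = factory.get("/fake/path", "single"); // created, refCnt == 1
    Directory d2 = factory.get("/fake/path", "single"); // cached,  refCnt == 2
    assert d1 == d2 : "the same path should yield the same cached instance";

    factory.release(d1); // refCnt == 1, still open
    factory.release(d2); // refCnt == 0; closed here only if marked doneWithDir
    factory.close();     // closes anything still cached
  }
}
```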
@@ -1,4 +1,5 @@
package org.apache.solr.core;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with

@@ -16,33 +17,84 @@ package org.apache.solr.core;
 * limitations under the License.
 */

import java.io.File;
import java.io.Closeable;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;

/**
 * Provides access to a Directory implementation.
 *
 * Provides access to a Directory implementation. You must release every
 * Directory that you get.
 */
public abstract class DirectoryFactory implements NamedListInitializedPlugin {

public abstract class DirectoryFactory implements NamedListInitializedPlugin,
    Closeable {

  /**
   * Opens a Lucene directory
   * Closes this factory and all of the Directories it contains.
   *
   * @throws IOException
   */
  public abstract Directory open(String path) throws IOException;
  public abstract void close() throws IOException;

  public boolean exists(String path) {
    // back compat behavior
    File dirFile = new File(path);
    return dirFile.canRead();
  }

  /**
   * Creates a new Directory for a given path.
   *
   * @param path
   * @return
   * @throws IOException
   */
  protected abstract Directory create(String path) throws IOException;

  /**
   * Returns true if a Directory exists for a given path.
   *
   * @param path
   * @return
   */
  public abstract boolean exists(String path);

  /**
   * Returns the Directory for a given path, using the specified rawLockType.
   * Will return the same Directory instance for the same path.
   *
   * @param path
   * @param rawLockType
   * @return
   * @throws IOException
   */
  public abstract Directory get(String path, String rawLockType)
      throws IOException;

  /**
   * Returns the Directory for a given path, using the specified rawLockType.
   * Will return the same Directory instance for the same path unless forceNew,
   * in which case a new Directory is returned.
   *
   * @param path
   * @param rawLockType
   * @param forceNew
   * @return
   * @throws IOException
   */
  public abstract Directory get(String path, String rawLockType,
      boolean forceNew) throws IOException;

  /**
   * Increment the number of references to the given Directory. You must call
   * release for every call to this method.
   *
   * @param directory
   */
  public abstract void incRef(Directory directory);

  /**
   * Releases the Directory so that it may be closed when it is no longer
   * referenced.
   *
   * @param directory
   * @throws IOException
   */
  public abstract void release(Directory directory) throws IOException;

  public void init(NamedList args) {
  }
}
@@ -38,23 +38,11 @@ import java.io.IOException;
 * </ul>
 *
 **/
public class MMapDirectoryFactory extends DirectoryFactory {
public class MMapDirectoryFactory extends CachingDirectoryFactory {
  private transient static Logger log = LoggerFactory.getLogger(MMapDirectoryFactory.class);
  boolean unmapHack;
  private int maxChunk;

  @Override
  public Directory open(String path) throws IOException {
    MMapDirectory mapDirectory = new MMapDirectory(new File(path));
    try {
      mapDirectory.setUseUnmap(unmapHack);
    } catch (Exception e) {
      log.warn("Unmap not supported on this JVM, continuing on without setting unmap", e);
    }
    mapDirectory.setMaxChunkSize(maxChunk);
    return mapDirectory;
  }

  @Override
  public void init(NamedList args) {
    SolrParams params = SolrParams.toSolrParams( args );

@@ -64,4 +52,16 @@ public class MMapDirectoryFactory extends DirectoryFactory {
    }
    unmapHack = params.getBool("unmap", true);
  }

  @Override
  protected Directory create(String path) throws IOException {
    MMapDirectory mapDirectory = new MMapDirectory(new File(path));
    try {
      mapDirectory.setUseUnmap(unmapHack);
    } catch (Exception e) {
      log.warn("Unmap not supported on this JVM, continuing on without setting unmap", e);
    }
    mapDirectory.setMaxChunkSize(maxChunk);
    return mapDirectory;
  }
}
@@ -27,10 +27,11 @@ import java.io.IOException;
 * Factory to instantiate {@link org.apache.lucene.store.NIOFSDirectory}
 *
 **/
public class NIOFSDirectoryFactory extends DirectoryFactory {
public class NIOFSDirectoryFactory extends CachingDirectoryFactory {

  @Override
  public Directory open(String path) throws IOException {
  protected Directory create(String path) throws IOException {
    return new NIOFSDirectory(new File(path));
  }
}
@@ -17,48 +17,21 @@

package org.apache.solr.core;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NRTCachingDirectory;

public class RefCntRamDirectory extends RAMDirectory {

  private final AtomicInteger refCount = new AtomicInteger();

  public RefCntRamDirectory() {
    super();
    refCount.set(1);
  }

  public RefCntRamDirectory(Directory dir) throws IOException {
    this();
    for (String file : dir.listAll()) {
      dir.copy(this, file, file, IOContext.DEFAULT);
    }
  }

  public void incRef() {
    ensureOpen();
    refCount.incrementAndGet();
  }

  public void decRef() {
    ensureOpen();
    if (refCount.getAndDecrement() == 1) {
      super.close();
    }
  }

/**
 * Factory to instantiate {@link org.apache.lucene.store.NRTCachingDirectory}
 */
public class NRTCachingDirectoryFactory extends StandardDirectoryFactory {

  @Override
  public final synchronized void close() {
    decRef();
  }

  public boolean isOpen() {
    return isOpen;
  protected Directory create(String path) throws IOException {
    return new NRTCachingDirectory(FSDirectory.open(new File(path)), 4, 48);
  }

}
@@ -19,37 +19,30 @@ package org.apache.solr.core;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/**
 * Directory provider for using lucene RAMDirectory
 * Factory to instantiate {@link org.apache.lucene.store.RAMDirectory}
 */
public class RAMDirectoryFactory extends StandardDirectoryFactory {
  private static Map<String, RefCntRamDirectory> directories = new HashMap<String, RefCntRamDirectory>();

  @Override
  public Directory open(String path) throws IOException {
    synchronized (RAMDirectoryFactory.class) {
      RefCntRamDirectory directory = directories.get(path);
      if (directory == null || !directory.isOpen()) {
        directory = (RefCntRamDirectory) openNew(path);
        directories.put(path, directory);
      } else {
        directory.incRef();
      }

      return directory;
    }
  protected Directory create(String path) throws IOException {
    return new RAMDirectory();
  }

  @Override
  public boolean exists(String path) {
    synchronized (RAMDirectoryFactory.class) {
      RefCntRamDirectory directory = directories.get(path);
      if (directory == null || !directory.isOpen()) {
    String fullPath = new File(path).getAbsolutePath();
    synchronized (DirectoryFactory.class) {
      CacheValue cacheValue = byPathCache.get(fullPath);
      Directory directory = null;
      if (cacheValue != null) {
        directory = cacheValue.directory;
      }
      if (directory == null) {
        return false;
      } else {
        return true;

@@ -57,19 +50,4 @@ public class RAMDirectoryFactory extends StandardDirectoryFactory {
      }
    }

  /**
   * Non-public for unit-test access only. Do not use directly
   */
  Directory openNew(String path) throws IOException {
    Directory directory;
    File dirFile = new File(path);
    boolean indexExists = dirFile.canRead();
    if (indexExists) {
      Directory dir = super.open(path);
      directory = new RefCntRamDirectory(dir);
    } else {
      directory = new RefCntRamDirectory();
    }
    return directory;
  }
}
@@ -27,10 +27,10 @@ import java.io.IOException;
 * Factory to instantiate {@link org.apache.lucene.store.SimpleFSDirectory}
 *
 **/
public class SimpleFSDirectoryFactory extends DirectoryFactory {
public class SimpleFSDirectoryFactory extends CachingDirectoryFactory {

  @Override
  public Directory open(String path) throws IOException {
  protected Directory create(String path) throws IOException {
    return new SimpleFSDirectory(new File(path));
  }
}
@@ -320,7 +320,7 @@ public final class SolrCore implements SolrInfoMBean {

  // gets a non-caching searcher
  public SolrIndexSearcher newSearcher(String name, boolean readOnly) throws IOException {
    return new SolrIndexSearcher(this, schema, name, directoryFactory.open(getIndexDir()), readOnly, false);
    return new SolrIndexSearcher(this, getNewIndexDir(), schema, getSolrConfig().mainIndexConfig, name, readOnly, false, directoryFactory);
  }

@@ -355,7 +355,6 @@ public final class SolrCore implements SolrInfoMBean {

  void initIndex() {
    try {
      initDirectoryFactory();
      String indexDir = getNewIndexDir();
      boolean indexExists = getDirectoryFactory().exists(indexDir);
      boolean firstTime;

@@ -369,13 +368,13 @@ public final class SolrCore implements SolrInfoMBean {
      if (indexExists && firstTime && removeLocks) {
        // to remove locks, the directory must already exist... so we create it
        // if it didn't exist already...
        Directory dir = SolrIndexWriter.getDirectory(indexDir, getDirectoryFactory(), solrConfig.mainIndexConfig);
        Directory dir = directoryFactory.get(indexDir, getSolrConfig().mainIndexConfig.lockType);
        if (dir != null) {
          if (IndexWriter.isLocked(dir)) {
            log.warn(logid+"WARNING: Solr index directory '" + indexDir+ "' is locked.  Unlocking...");
            IndexWriter.unlock(dir);
          }
          dir.close();
          directoryFactory.release(dir);
        }
      }

@@ -384,7 +383,7 @@ public final class SolrCore implements SolrInfoMBean {
        log.warn(logid+"Solr index directory '" + new File(indexDir) + "' doesn't exist."
            + " Creating new index...");

        SolrIndexWriter writer = new SolrIndexWriter("SolrCore.initIndex", indexDir, getDirectoryFactory(), true, schema, solrConfig.mainIndexConfig, solrDelPolicy, codecProvider);
        SolrIndexWriter writer = new SolrIndexWriter("SolrCore.initIndex", indexDir, getDirectoryFactory(), true, schema, solrConfig.mainIndexConfig, solrDelPolicy, codecProvider, false);
        writer.close();
      }

@@ -490,20 +489,18 @@ public final class SolrCore implements SolrInfoMBean {
   * @since solr 1.0
   */
  public SolrCore(String dataDir, IndexSchema schema) throws ParserConfigurationException, IOException, SAXException {
    this(null, dataDir, new SolrConfig(), schema, null );
    this(null, dataDir, new SolrConfig(), schema, null);
  }

  /**
   * Creates a new core and register it in the list of cores.
   * If a core with the same name already exists, it will be stopped and replaced by this one.
   *
   * @param name
   * @param dataDir the index directory
   * @param config a solr config instance
   * @param schema a solr schema instance
   * @param cd
   *
   * @since solr 1.3
   *@param dataDir the index directory
   *@param config a solr config instance
   *@param schema a solr schema instance
   *@param updateHandler
   *
   *@since solr 1.3
   */
  public SolrCore(String name, String dataDir, SolrConfig config, IndexSchema schema, CoreDescriptor cd) {
    this(name, dataDir, config, schema, cd, null);

@@ -559,6 +556,13 @@ public final class SolrCore implements SolrInfoMBean {
    initDeletionPolicy();

    this.codecProvider = initCodecProvider(solrConfig, schema);

    if (updateHandler == null) {
      initDirectoryFactory();
    } else {
      directoryFactory = updateHandler.getIndexWriterProvider().getDirectoryFactory();
    }

    initIndex();

    initWriters();

@@ -601,7 +605,6 @@ public final class SolrCore implements SolrInfoMBean {
      this.updateHandler = createUpdateHandler(updateHandlerClass == null ? DirectUpdateHandler2.class
          .getName() : updateHandlerClass);
    } else {

      this.updateHandler = createUpdateHandler(
          updateHandlerClass == null ? DirectUpdateHandler2.class.getName()
              : updateHandlerClass, updateHandler);

@@ -737,11 +740,7 @@ public final class SolrCore implements SolrInfoMBean {
    } catch (Exception e) {
      SolrException.log(log, e);
    }
    try {
      updateHandler.close();
    } catch (Exception e) {
      SolrException.log(log,e);
    }

    try {
      searcherExecutor.shutdown();
      if (!searcherExecutor.awaitTermination(60, TimeUnit.SECONDS)) {

@@ -763,6 +762,12 @@ public final class SolrCore implements SolrInfoMBean {
      SolrException.log(log,e);
    }

    try {
      updateHandler.close();
    } catch (Exception e) {
      SolrException.log(log,e);
    }

    if( closeHooks != null ) {
      for( CloseHook hook : closeHooks ) {
        try {

@@ -1114,12 +1119,11 @@ public final class SolrCore implements SolrInfoMBean {
          currentReader.incRef();
        }

        tmp = new SolrIndexSearcher(this, schema, "main", newReader, true, true);
        tmp = new SolrIndexSearcher(this, schema, "main", newReader, true, true, true, directoryFactory);
      }

    } else {
      IndexReader reader = getIndexReaderFactory().newReader(getDirectoryFactory().open(newIndexDir), true);
      tmp = new SolrIndexSearcher(this, schema, "main", reader, true, true);
      tmp = new SolrIndexSearcher(this, newIndexDir, schema, getSolrConfig().mainIndexConfig, "main", true, true, directoryFactory);
    }
  } catch (Throwable th) {
    synchronized(searcherLock) {
@@ -23,13 +23,14 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

/**
 * Directory provider which mimics original Solr FSDirectory based behavior.
 * Directory provider which mimics original Solr
 * {@link org.apache.lucene.store.FSDirectory} based behavior.
 *
 */
public class StandardDirectoryFactory extends DirectoryFactory {
public class StandardDirectoryFactory extends CachingDirectoryFactory {

  @Override
  public Directory open(String path) throws IOException {
  protected Directory create(String path) throws IOException {
    return FSDirectory.open(new File(path));
  }
}
@@ -19,6 +19,7 @@ package org.apache.solr.handler.admin;

import org.apache.commons.io.FileUtils;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.common.SolrException;

@@ -183,6 +184,7 @@ public class CoreAdminHandler extends RequestHandlerBase {
    RefCounted<SolrIndexSearcher>[] searchers = null;
    // stores readers created from indexDir param values
    IndexReader[] readersToBeClosed = null;
    Directory[] dirsToBeReleased = null;
    if (core != null) {
      try {
        String[] dirNames = params.getParams(CoreAdminParams.INDEX_DIR);

@@ -203,9 +205,12 @@ public class CoreAdminHandler extends RequestHandlerBase {
          }
        } else {
          readersToBeClosed = new IndexReader[dirNames.length];
          dirsToBeReleased = new Directory[dirNames.length];
          DirectoryFactory dirFactory = core.getDirectoryFactory();
          for (int i = 0; i < dirNames.length; i++) {
            readersToBeClosed[i] = IndexReader.open(dirFactory.open(dirNames[i]), true);
            Directory dir = dirFactory.get(dirNames[i], core.getSolrConfig().mainIndexConfig.lockType);
            dirsToBeReleased[i] = dir;
            readersToBeClosed[i] = IndexReader.open(dir, true);
          }
        }

@@ -241,6 +246,12 @@ public class CoreAdminHandler extends RequestHandlerBase {
          }
        }
        if (readersToBeClosed != null) IOUtils.closeSafely(true, readersToBeClosed);
        if (dirsToBeReleased != null) {
          for (Directory dir : dirsToBeReleased) {
            DirectoryFactory dirFactory = core.getDirectoryFactory();
            dirFactory.release(dir);
          }
        }
        if (wrappedReq != null) wrappedReq.close();
        core.close();
      }
@@ -17,12 +17,51 @@

package org.apache.solr.search;

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.index.*;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.search.*;
import org.apache.lucene.index.MultiDocsEnum;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Bits;

@@ -31,6 +70,7 @@ import org.apache.lucene.util.OpenBitSet;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;

@@ -41,14 +81,10 @@ import org.apache.solr.request.UnInvertedField;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.update.SolrIndexConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URL;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;

/**
 * SolrIndexSearcher adds schema awareness and caching functionality

@@ -99,40 +135,31 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean {

  private final Collection<String> fieldNames;
  private Collection<String> storedHighlightFieldNames;

  /*
   * Creates a searcher searching the index in the provided directory. Note:
   * uses the main IndexReaderFactory for the specified SolrCore.
   *
   * @see SolrCore#getMainIndexReaderFactory
   */
  public SolrIndexSearcher(SolrCore core, IndexSchema schema, String name,
      Directory directory, boolean enableCache) throws IOException {
    this(core, schema,name, core.getIndexReaderFactory().newReader(directory, false), true, enableCache);
  }
  private DirectoryFactory directoryFactory;

  /** Creates a searcher searching the index in the provided directory. */
  public SolrIndexSearcher(SolrCore core, IndexSchema schema, String name, Directory directory, boolean readOnly, boolean enableCache) throws IOException {
    this(core, schema,name, core.getIndexReaderFactory().newReader(directory, readOnly), true, enableCache);
  public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, SolrIndexConfig config, String name, boolean readOnly, boolean enableCache, DirectoryFactory directoryFactory) throws IOException {
    // we don't need to reserve the directory because we get it from the factory
    this(core, schema,name, core.getIndexReaderFactory().newReader(directoryFactory.get(path, config.lockType), readOnly), true, enableCache, false, directoryFactory);
  }

  /** Creates a searcher searching the provided index. */
  public SolrIndexSearcher(SolrCore core, IndexSchema schema, String name, IndexReader r, boolean enableCache) {
    this(core, schema,name,r, false, enableCache);
  }

  public SolrIndexSearcher(SolrCore core, IndexSchema schema, String name, IndexReader r, boolean closeReader, boolean enableCache) {
  public SolrIndexSearcher(SolrCore core, IndexSchema schema, String name, IndexReader r, boolean closeReader, boolean enableCache, boolean reserveDirectory, DirectoryFactory directoryFactory) {
    super(r);
    this.directoryFactory = directoryFactory;
    this.reader = getIndexReader();
    this.core = core;
    this.schema = schema;
    this.name = "Searcher@" + Integer.toHexString(hashCode()) + (name!=null ? " "+name : "");
    log.info("Opening " + this.name);

    if (r.directory() instanceof FSDirectory) {
      FSDirectory fsDirectory = (FSDirectory) r.directory();
    Directory dir = r.directory();

    if (reserveDirectory) {
      // keep the directory from being released while we use it
      directoryFactory.incRef(dir);
    }

    if (dir instanceof FSDirectory) {
      FSDirectory fsDirectory = (FSDirectory) dir;
      indexDir = fsDirectory.getDirectory().getAbsolutePath();
    }

@@ -188,7 +215,6 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean {
    numOpens.incrementAndGet();
  }

  @Override
  public String toString() {
    return name;

@@ -241,6 +267,10 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean {
      cache.close();
    }

    directoryFactory.release(getIndexReader().directory());

    // do this at the end so it only gets done if there are no exceptions
    numCloses.incrementAndGet();
  }
@@ -83,7 +83,7 @@ final class CommitTracker implements Runnable {
      pending.cancel(true);
      pending = null;
    }
    scheduler.shutdown();
    scheduler.shutdownNow();
  }

  /** schedule individual commits */
@@ -20,20 +20,22 @@ package org.apache.solr.update;
import java.io.IOException;

import org.apache.lucene.index.IndexWriter;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.SolrCore;

public final class DefaultIndexWriterProvider implements IndexWriterProvider {
public final class DefaultSolrCoreState extends SolrCoreState {
  private int refCnt = 1;
  private IndexWriter indexWriter = null;

  public DefaultIndexWriterProvider() {
  private SolrIndexWriter indexWriter = null;
  private DirectoryFactory directoryFactory;

  public DefaultSolrCoreState(DirectoryFactory directoryFactory) {
    this.directoryFactory = directoryFactory;
  }

  @Override
  public synchronized IndexWriter getIndexWriter(SolrCore core) throws IOException {
    if (indexWriter == null) {
      indexWriter = createMainIndexWriter(core, "DirectUpdateHandler2", false);
      indexWriter = createMainIndexWriter(core, "DirectUpdateHandler2", false, false);
    }
    return indexWriter;
  }

@@ -44,14 +46,17 @@ public final class DefaultIndexWriterProvider implements IndexWriterProvider {
      indexWriter.close();
    }
    indexWriter = createMainIndexWriter(core, "DirectUpdateHandler2",
        false);
        false, true);
  }

  @Override
  public synchronized void decref() throws IOException {
    refCnt--;
    if (refCnt == 0 && indexWriter != null) {
      indexWriter.close();
    if (refCnt == 0) {
      if (indexWriter != null) {
        indexWriter.close();
      }
      directoryFactory.close();
    }
  }

@@ -70,10 +75,15 @@ public final class DefaultIndexWriterProvider implements IndexWriterProvider {
  }

  protected SolrIndexWriter createMainIndexWriter(SolrCore core, String name,
      boolean removeAllExisting) throws IOException {
      boolean removeAllExisting, boolean forceNewDirectory) throws IOException {
    return new SolrIndexWriter(name, core.getNewIndexDir(),
        core.getDirectoryFactory(), removeAllExisting, core.getSchema(),
        core.getSolrConfig().mainIndexConfig, core.getDeletionPolicy(), core.getCodecProvider());
        core.getSolrConfig().mainIndexConfig, core.getDeletionPolicy(), core.getCodecProvider(), forceNewDirectory);
  }

  @Override
  public DirectoryFactory getDirectoryFactory() {
    return directoryFactory;
  }

}
@@ -52,7 +52,7 @@ import org.apache.solr.search.SolrIndexSearcher;
 * directly to the main Lucene index as opposed to adding to a separate smaller index.
 */
public class DirectUpdateHandler2 extends UpdateHandler {
  protected IndexWriterProvider indexWriterProvider;
  protected SolrCoreState indexWriterProvider;

  // stats
  AtomicLong addCommands = new AtomicLong();

@@ -77,7 +77,7 @@ public class DirectUpdateHandler2 extends UpdateHandler {
  public DirectUpdateHandler2(SolrCore core) throws IOException {
    super(core);

    indexWriterProvider = new DefaultIndexWriterProvider();
    indexWriterProvider = new DefaultSolrCoreState(core.getDirectoryFactory());

    UpdateHandlerInfo updateHandlerInfo = core.getSolrConfig()
        .getUpdateHandlerInfo();

@@ -97,7 +97,7 @@ public class DirectUpdateHandler2 extends UpdateHandler {
    } else {
      // the impl has changed, so we cannot use the old state - decref it
      updateHandler.decref();
      indexWriterProvider = new DefaultIndexWriterProvider();
      indexWriterProvider = new DefaultSolrCoreState(core.getDirectoryFactory());
    }

    UpdateHandlerInfo updateHandlerInfo = core.getSolrConfig()

@@ -348,8 +348,8 @@ public class DirectUpdateHandler2 extends UpdateHandler {
    if (newReader == currentReader) {
      currentReader.incRef();
    }

    return new SolrIndexSearcher(core, schema, "main", newReader, true, true);

    return new SolrIndexSearcher(core, schema, "main", newReader, true, true, true, core.getDirectoryFactory());
  }

  @Override

@@ -401,6 +401,7 @@ public class DirectUpdateHandler2 extends UpdateHandler {
    softCommitTracker.close();

    numDocsPending.set(0);

    indexWriterProvider.decref();

    log.info("closed " + this);

@@ -478,7 +479,7 @@ public class DirectUpdateHandler2 extends UpdateHandler {
    return "DirectUpdateHandler2" + getStatistics();
  }

  public IndexWriterProvider getIndexWriterProvider() {
  public SolrCoreState getIndexWriterProvider() {
    return indexWriterProvider;
  }
@@ -0,0 +1,79 @@
package org.apache.solr.update;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.index.IndexWriter;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.SolrCore;

/**
 * The state in this class can be easily shared between SolrCores across
 * SolrCore reloads.
 */
public abstract class SolrCoreState {

  /**
   * Force the creation of a new IndexWriter using the settings from the given
   * SolrCore.
   *
   * @param core
   * @throws IOException
   */
  public abstract void newIndexWriter(SolrCore core) throws IOException;

  /**
   * Get the current IndexWriter. If a new IndexWriter must be created, use the
   * settings from the given {@link SolrCore}.
   *
   * @param core
   * @return
   * @throws IOException
   */
  public abstract IndexWriter getIndexWriter(SolrCore core) throws IOException;

  /**
   * Decrement the number of references to this state. When the number of
   * references hits 0, the state will close.
   *
   * @throws IOException
   */
  public abstract void decref() throws IOException;

  /**
   * Increment the number of references to this state.
   */
  public abstract void incref();

  /**
   * Rollback the current IndexWriter. When creating the new IndexWriter use the
   * settings from the given {@link SolrCore}.
   *
   * @param core
   * @throws IOException
   */
  public abstract void rollbackIndexWriter(SolrCore core) throws IOException;

  /**
   * @return the {@link DirectoryFactory} that should be used.
   */
  public abstract DirectoryFactory getDirectoryFactory();

}
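For context, a brief illustrative sketch (not from the patch) of the incref/decref contract above when handing a shared SolrCoreState from an old update handler to a new one, for example during a core reload: the new owner takes its reference before the old owner releases its own, so the shared IndexWriter and DirectoryFactory are never closed in between. Only the SolrCoreState method names come from the class above; the surrounding helper is hypothetical.

```java
import java.io.IOException;
import org.apache.solr.update.SolrCoreState;

public class CoreStateHandoffSketch {
  /** Transfers ownership of a shared SolrCoreState from an old holder to a new one. */
  static SolrCoreState transfer(SolrCoreState state) throws IOException {
    state.incref();   // the new owner's reference is taken first
    // ... the new update handler stores and starts using 'state' here ...
    state.decref();   // then the old owner's reference is released
    return state;     // ref count stays >= 1, so nothing is closed during the swap
  }
}
```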
@@ -17,16 +17,6 @@

package org.apache.solr.update;

import org.apache.lucene.index.*;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.store.*;
import org.apache.solr.common.SolrException;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.schema.IndexSchema;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

@@ -34,9 +24,18 @@ import java.io.OutputStream;
import java.io.PrintStream;
import java.text.DateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.store.Directory;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.schema.IndexSchema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An IndexWriter that is configured via Solr config mechanisms.
 *

@@ -51,41 +50,11 @@ public class SolrIndexWriter extends IndexWriter {

  String name;
  private PrintStream infoStream;
  private DirectoryFactory directoryFactory;

  public static Directory getDirectory(String path, DirectoryFactory directoryFactory, SolrIndexConfig config) throws IOException {

    Directory d = directoryFactory.open(path);

    String rawLockType = (null == config) ? null : config.lockType;
    if (null == rawLockType) {
      // we default to "simple" for backwards compatibility
      log.warn("No lockType configured for " + path + " assuming 'simple'");
      rawLockType = "simple";
    }
    final String lockType = rawLockType.toLowerCase(Locale.ENGLISH).trim();

    if ("simple".equals(lockType)) {
      // multiple SimpleFSLockFactory instances should be OK
      d.setLockFactory(new SimpleFSLockFactory(path));
    } else if ("native".equals(lockType)) {
      d.setLockFactory(new NativeFSLockFactory(path));
    } else if ("single".equals(lockType)) {
      if (!(d.getLockFactory() instanceof SingleInstanceLockFactory))
        d.setLockFactory(new SingleInstanceLockFactory());
    } else if ("none".equals(lockType)) {
      // Recipe for disaster
      log.error("CONFIGURATION WARNING: locks are disabled on " + path);
      d.setLockFactory(NoLockFactory.getNoLockFactory());
    } else {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Unrecognized lockType: " + rawLockType);
    }
    return d;
  }

  public SolrIndexWriter(String name, String path, DirectoryFactory dirFactory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, CodecProvider codecProvider) throws IOException {
  public SolrIndexWriter(String name, String path, DirectoryFactory directoryFactory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, CodecProvider codecProvider, boolean forceNewDirectory) throws IOException {
    super(
        getDirectory(path, dirFactory, config),
        directoryFactory.get(path, config.lockType, forceNewDirectory),
        config.toIndexWriterConfig(schema).
            setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND).
            setIndexDeletionPolicy(delPolicy).setCodecProvider(codecProvider)

@@ -93,10 +62,11 @@ public class SolrIndexWriter extends IndexWriter {
    log.debug("Opened Writer " + name);
    this.name = name;

    this.directoryFactory = directoryFactory;
    setInfoStream(config);
    numOpens.incrementAndGet();
  }

  private void setInfoStream(SolrIndexConfig config)
      throws IOException {
    String infoStreamFile = config.infoStreamFile;

@@ -142,9 +112,13 @@ public class SolrIndexWriter extends IndexWriter {
   * ****
   */
  private volatile boolean isClosed = false;

  @Override
  public void close() throws IOException {
    log.debug("Closing Writer " + name);
    Directory directory = getDirectory();

    try {
      super.close();
      if(infoStream != null) {

@@ -152,6 +126,9 @@ public class SolrIndexWriter extends IndexWriter {
      }
    } finally {
      isClosed = true;

      directoryFactory.release(directory);

      numCloses.incrementAndGet();
    }
  }
@@ -18,25 +18,20 @@
package org.apache.solr.update;

import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Vector;
import java.io.IOException;
import java.util.Vector;

import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.FieldType;
import org.apache.solr.common.SolrException;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.apache.solr.core.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * <code>UpdateHandler</code> handles requests to change the index

@@ -129,6 +124,7 @@ public abstract class UpdateHandler implements SolrInfoMBean {
   */
  public abstract void newIndexWriter() throws IOException;

  public abstract SolrCoreState getIndexWriterProvider();

  public abstract int addDoc(AddUpdateCommand cmd) throws IOException;
  public abstract void delete(DeleteUpdateCommand cmd) throws IOException;
@@ -59,7 +59,6 @@ public class TestSolrCoreProperties extends LuceneTestCase {
  @Override
  public void tearDown() throws Exception {
    solrJetty.stop();
    SolrTestCaseJ4.closeDirectories();
    AbstractSolrTestCase.recurseDelete(homeDir);
    super.tearDown();
  }
@@ -41,20 +41,16 @@ public class AlternateDirectoryTest extends SolrTestCaseJ4 {
    assertQ(req("q","*:*","qt","standard"));
    assertTrue(TestFSDirectoryFactory.openCalled);
    assertTrue(TestIndexReaderFactory.newReaderCalled);
    TestFSDirectoryFactory.dir.close();
  }

  static public class TestFSDirectoryFactory extends DirectoryFactory {
  static public class TestFSDirectoryFactory extends CachingDirectoryFactory {
    public static volatile boolean openCalled = false;
    public static volatile Directory dir;

    @Override
    public Directory open(String path) throws IOException {
    public Directory create(String path) throws IOException {
      openCalled = true;
      // need to close the directory, or otherwise the test fails.
      if (dir != null) {
        dir.close();
      }
      return dir = newFSDirectory(new File(path));
    }
@@ -17,11 +17,12 @@

package org.apache.solr.core;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.SingleInstanceLockFactory;
import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

/**
 * Test-case for RAMDirectoryFactory
 */

@@ -33,28 +34,28 @@ public class RAMDirectoryFactoryTest extends LuceneTestCase {
  }

  private void dotestOpenReturnsTheSameForSamePath() throws IOException {
    final Directory directory = new RefCntRamDirectory();
    final Directory directory = new RAMDirectory();
    RAMDirectoryFactory factory = new RAMDirectoryFactory() {
      @Override
      Directory openNew(String path) throws IOException {
      protected Directory create(String path) throws IOException {
        return directory;
      }
    };
    String path = "/fake/path";
    Directory dir1 = factory.open(path);
    Directory dir2 = factory.open(path);
    Directory dir1 = factory.get(path, null);
    Directory dir2 = factory.get(path, null);
    assertEquals("RAMDirectoryFactory should not create new instance of RefCntRamDirectory " +
        "every time open() is called for the same path", directory, dir1);
    assertEquals("RAMDirectoryFactory should not create new instance of RefCntRamDirectory " +
        "every time open() is called for the same path", directory, dir2);
    dir1.close();
    dir2.close();
        "every time open() is called for the same path", dir1, dir2);

    factory.release(dir1);
    factory.release(dir2);
  }

  private void dotestOpenSucceedForEmptyDir() throws IOException {
    RAMDirectoryFactory factory = new RAMDirectoryFactory();
    Directory dir = factory.open("/fake/path");
    Directory dir = factory.get("/fake/path", null);
    assertNotNull("RAMDirectoryFactory should create RefCntRamDirectory even if the path doen't lead " +
        "to index directory on the file system", dir);
    factory.release(dir);
  }
}
@@ -18,10 +18,9 @@
package org.apache.solr.core;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.EventParams;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.apache.solr.common.params.EventParams;
import org.apache.lucene.store.Directory;
import org.junit.BeforeClass;
import org.junit.Test;

@@ -75,8 +74,8 @@ public class TestQuerySenderListener extends SolrTestCaseJ4 {
    String evt = mock.req.getParams().get(EventParams.EVENT);
    assertNotNull("Event is null", evt);
    assertTrue(evt + " is not equal to " + EventParams.FIRST_SEARCHER, evt.equals(EventParams.FIRST_SEARCHER) == true);
    Directory dir = currentSearcher.getIndexReader().directory();
    SolrIndexSearcher newSearcher = new SolrIndexSearcher(core, core.getSchema(), "testQuerySenderListener", dir, true, false);

    SolrIndexSearcher newSearcher = new SolrIndexSearcher(core, core.getNewIndexDir(), core.getSchema(), core.getSolrConfig().mainIndexConfig, "testQuerySenderListener", true, false, core.getDirectoryFactory());

    qsl.newSearcher(newSearcher, currentSearcher);
    evt = mock.req.getParams().get(EventParams.EVENT);
@@ -17,11 +17,8 @@ package org.apache.solr.core;
 * limitations under the License.
 */

import org.apache.lucene.store.Directory;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.EventParams;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.TestExtendedDismaxParser;
import org.apache.solr.util.RefCounted;
import org.junit.BeforeClass;
import org.junit.Test;

@@ -78,8 +75,7 @@ public class TestQuerySenderNoQuery extends SolrTestCaseJ4 {
    assertNotNull("Mock is null", mock);
    assertNull("Req (firstsearcher) is not null", mock.req);

    Directory dir = currentSearcher.getIndexReader().directory();
    SolrIndexSearcher newSearcher = new SolrIndexSearcher(core, core.getSchema(), "testQuerySenderNoQuery", dir, true, false);
    SolrIndexSearcher newSearcher = new SolrIndexSearcher(core, core.getNewIndexDir(), core.getSchema(), core.getSolrConfig().mainIndexConfig, "testQuerySenderNoQuery", true, false, core.getDirectoryFactory());

    qsl.newSearcher(newSearcher, currentSearcher); // get newSearcher.
    assertNull("Req (newsearcher) is not null", mock.req);
@@ -373,7 +373,7 @@ public class AutoCommitTest extends AbstractSolrTestCase {

    // too low of a number can cause a slow host to commit before the test code checks that it
    // isn't there... causing a failure at "shouldn't find any"
    softTracker.timeUpperBound = 1000;
    softTracker.timeUpperBound = 1200;
    softTracker.docsUpperBound = -1;
    hardTracker.timeUpperBound = 3000;
    hardTracker.docsUpperBound = -1;

@@ -435,8 +435,12 @@ public class AutoCommitTest extends AbstractSolrTestCase {
    req.setContentStreams( toContentStreams(
        adoc("id", "531", "field_t", "what's inside?", "subject", "info"), null ) );
    handler.handleRequest( req, rsp );
    assertEquals( 2, softTracker.getCommitCount() );
    assertEquals( 1, hardTracker.getCommitCount() );

    // depending on timing, you might see 2 or 3 soft commits
    int softCommitCnt = softTracker.getCommitCount();
    assertTrue("commit cnt:" + softCommitCnt, softCommitCnt == 2
        || softCommitCnt == 3);
    assertEquals(1, hardTracker.getCommitCount());

    assertQ("now it should", req("id:500") ,"//result[@numFound=1]" );
    assertQ("but not this", req("id:531") ,"//result[@numFound=0]" );
@@ -17,19 +17,18 @@

package org.apache.solr.client.solrj;

import java.io.File;
import java.io.IOException;

import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.request.UpdateRequest.ACTION;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.ExternalPaths;

import java.io.File;
import java.io.IOException;

/**
 * Abstract base class for testing merge indexes command
 *

@@ -39,6 +38,7 @@ import java.io.IOException;
public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {
  // protected static final CoreContainer cores = new CoreContainer();
  protected static CoreContainer cores;
  private String saveProp;
  private File dataDir2;

  @Override

@@ -56,7 +56,9 @@ public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {
    return getSolrHome() + "/core0/conf/solrconfig.xml";
  }

  @Override
  public void setUp() throws Exception {
    saveProp = System.getProperty("solr.directoryFactory");
    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
    super.setUp();

@@ -80,13 +82,15 @@ public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {

    String skip = System.getProperty("solr.test.leavedatadir");
    if (null != skip && 0 != skip.trim().length()) {
      System.err.println("NOTE: per solr.test.leavedatadir, dataDir2 will not be removed: " + dataDir2.getAbsolutePath());
      System.err.println("NOTE: per solr.test.leavedatadir, dataDir will not be removed: " + dataDir.getAbsolutePath());
    } else {
      if (!recurseDelete(dataDir2)) {
      if (!recurseDelete(dataDir)) {
        System.err.println("!!!! WARNING: best effort to remove " + dataDir.getAbsolutePath() + " FAILED !!!!!");
      }
    }

    if (saveProp == null) System.clearProperty("solr.directoryFactory");
    else System.setProperty("solr.directoryFactory", saveProp);
  }

  @Override
@@ -17,6 +17,8 @@

package org.apache.solr.client.solrj;

import java.io.File;

import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;

@@ -26,12 +28,9 @@ import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.util.ExternalPaths;
import org.junit.Test;

import java.io.File;

/**
 *

@@ -42,7 +41,7 @@ public abstract class MultiCoreExampleTestBase extends SolrExampleTestBase
  // protected static final CoreContainer cores = new CoreContainer();
  protected static CoreContainer cores;
  private File dataDir2;

  @Override public String getSolrHome() { return ExternalPaths.EXAMPLE_MULTICORE_HOME; }

  @Override public String getSchemaFile() { return getSolrHome()+"/core0/conf/schema.xml"; }
@@ -77,7 +77,6 @@ public class JettyWebappTest extends LuceneTestCase
    try {
      server.stop();
    } catch( Exception ex ) {}
    SolrTestCaseJ4.closeDirectories();
    super.tearDown();
  }
@@ -76,16 +76,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
    endTrackingSearchers();
    endTrackingWriters();
  }

  // SOLR-2279: hack to shut these directories down
  // we still keep the ability to track open index files this way
  public static void closeDirectories() throws Exception {
    for (MockDirectoryWrapper d : stores.keySet()) {
      if (d.isOpen()) {
        d.close();
      }
    }
  }

  @Override
  public void setUp() throws Exception {

@@ -302,7 +292,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
  public static void deleteCore() throws Exception {
    log.info("###deleteCore" );
    if (h != null) { h.close(); }
    closeDirectories();
    if (dataDir != null) {
      String skip = System.getProperty("solr.test.leavedatadir");
      if (null != skip && 0 != skip.trim().length()) {
@@ -26,10 +26,10 @@ import org.apache.lucene.util.LuceneTestCase;
/**
 * Opens a directory with {@link LuceneTestCase#newFSDirectory(File)}
 */
public class MockDirectoryFactory extends DirectoryFactory {
public class MockDirectoryFactory extends CachingDirectoryFactory {

  @Override
  public Directory open(String path) throws IOException {
  public Directory create(String path) throws IOException {
    return LuceneTestCase.newFSDirectory(new File(path));
  }
}
@@ -193,7 +193,6 @@ public abstract class AbstractSolrTestCase extends LuceneTestCase {
    }

    if (h != null) { h.close(); }
    SolrTestCaseJ4.closeDirectories();
    String skip = System.getProperty("solr.test.leavedatadir");
    if (null != skip && 0 != skip.trim().length()) {
      System.err.println("NOTE: per solr.test.leavedatadir, dataDir will not be removed: " + dataDir.getAbsolutePath());