mirror of https://github.com/apache/lucene.git

SOLR-3911: Make Directory and DirectoryFactory first class so that the majority of Solr's features work with any custom implementations.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1402613 13f79535-47bb-0310-9956-ffa450edef68

parent 400d1e0290
commit 42f27adc1a

@@ -52,6 +52,9 @@ New Features
* SOLR-3985: ExternalFileField caches can be reloaded on firstSearcher/
  newSearcher events using the ExternalFileFieldReloader (Alan Woodward)

* SOLR-3911: Make Directory and DirectoryFactory first class so that the majority
  of Solr's features work with any custom implementations. (Mark Miller)

Optimizations
----------------------

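The CHANGES entry above is the substance of this commit: after it, any DirectoryFactory plugged into solrconfig.xml should work across replication, leader election, and core lifecycle, not just the stock file-system factories. As a hedged sketch of what a "custom implementation" looks like against the API in this patch (the class name is hypothetical; only create() must be supplied, since caching, ref counting, and close listeners come from the base classes changed below):

package org.example.solr;

import java.io.File;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.solr.core.StandardDirectoryFactory;

// Hypothetical custom factory: swap in any Directory implementation here.
public class MyDirectoryFactory extends StandardDirectoryFactory {

  @Override
  protected Directory create(String path) throws IOException {
    return new NIOFSDirectory(new File(path));
  }
}
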
@@ -239,6 +239,13 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
} catch (Throwable t) {
try {
core = cc.getCore(coreName);
if (core == null) {
cancelElection();
throw new SolrException(ErrorCode.SERVER_ERROR,
"Fatal Error, SolrCore not found:" + coreName + " in "
+ cc.getCoreNames());
}

core.getCoreDescriptor().getCloudDescriptor().isLeader = false;

// we could not publish ourselves as leader - rejoin election

|
@@ -348,12 +355,15 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
return false;
}

if (core.getCoreDescriptor().getCloudDescriptor().getLastPublished().equals(ZkStateReader.ACTIVE)) {
if (core.getCoreDescriptor().getCloudDescriptor().getLastPublished()
.equals(ZkStateReader.ACTIVE)) {
log.info("My last published State was Active, it's okay to be the leader.");
return true;
}

// TODO: and if no is a good candidate?
log.info("My last published State was "
+ core.getCoreDescriptor().getCloudDescriptor().getLastPublished()
+ ", I won't be the leader.");
// TODO: and if no one is a good candidate?

return false;
}

@@ -193,8 +193,6 @@ public final class ZkController {
ElectionContext context = new OverseerElectionContext(zkClient, overseer, getNodeName());
overseerElector.joinElection(context, true);
zkStateReader.createClusterStateWatchersAndUpdate();

// cc.newCmdDistribExecutor();

// we have to register as live first to pick up docs in the buffer
createEphemeralLiveNode();

@@ -308,7 +306,11 @@ public final class ZkController {
}

for (ElectionContext context : electionContexts.values()) {
context.close();
try {
context.close();
} catch (Throwable t) {
log.error("Error closing overseer", t);
}
}

try {

@@ -62,7 +62,9 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
protected Map<Directory,List<CloseListener>> closeListeners = new HashMap<Directory,List<CloseListener>>();

public interface CloseListener {
public void onClose();
public void postClose();

public void preClose();
}

@Override

@@ -109,6 +111,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
synchronized (this) {
for (CacheValue val : byDirectoryCache.values()) {
try {
assert val.refCnt == 0 : val.refCnt;
val.directory.close();
} catch (Throwable t) {
SolrException.log(log, "Error closing directory", t);

@@ -126,22 +129,33 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
throw new IllegalArgumentException("Unknown directory: " + directory
+ " " + byDirectoryCache);
}

log.debug("Closing: {}", cacheValue);
log.info("Releasing directory:" + cacheValue.path);

cacheValue.refCnt--;

if (cacheValue.refCnt == 0 && cacheValue.doneWithDir) {
log.info("Closing directory:" + cacheValue.path);
directory.close();
byDirectoryCache.remove(directory);
byPathCache.remove(cacheValue.path);
List<CloseListener> listeners = closeListeners.remove(directory);
if (listeners != null) {
for (CloseListener listener : listeners) {
listener.onClose();
listener.preClose();
}
}
try {
directory.close();
} catch (Throwable t) {
SolrException.log(log, "Error closing directory", t);
}

if (listeners != null) {
for (CloseListener listener : listeners) {
listener.postClose();
}
closeListeners.remove(directory);
}

byDirectoryCache.remove(directory);
byPathCache.remove(cacheValue.path);
}
}
}

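The release() rework above is the heart of the new lifecycle: every get() must be paired with a release(), and the Directory is only closed once its ref count reaches zero and the factory has been told it is done with the directory; preClose listeners run just before that close and postClose listeners just after. A minimal usage sketch (names are illustrative; it assumes the addCloseListener method that SnapPuller calls later in this patch):

import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.solr.core.CachingDirectoryFactory;
import org.apache.solr.core.CachingDirectoryFactory.CloseListener;

public class CloseListenerExample {
  // Sketch only: obtain a Directory through the factory, register
  // lifecycle callbacks, and release the reference when done.
  static void useDirectory(CachingDirectoryFactory factory, String path) throws IOException {
    Directory dir = factory.get(path, null); // increments the ref count
    factory.addCloseListener(dir, new CloseListener() {
      @Override
      public void preClose() {
        // runs just before the Directory is closed
      }
      @Override
      public void postClose() {
        // runs after the Directory has been closed
      }
    });
    try {
      // ... use dir ...
    } finally {
      // decrements the ref count; the close itself happens once the count
      // hits 0 and the factory is done with this directory
      factory.release(dir);
    }
  }
}
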
@@ -178,19 +192,32 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
throws IOException {
String fullPath = new File(path).getAbsolutePath();
synchronized (this) {
CacheValue cacheValue = byPathCache.get(fullPath);
final CacheValue cacheValue = byPathCache.get(fullPath);
Directory directory = null;
if (cacheValue != null) {
directory = cacheValue.directory;
if (forceNew) {
cacheValue.doneWithDir = true;

// we make a quick close attempt,
// otherwise this should be closed
// when whatever is using it, releases it

if (cacheValue.refCnt == 0) {
close(cacheValue.directory);
try {
// the following will decref, so
// first incref
cacheValue.refCnt++;
close(cacheValue.directory);
} catch (IOException e) {
SolrException.log(log, "Error closing directory", e);
}
}

}
}

if (directory == null || forceNew) {
if (directory == null || forceNew) {
directory = create(fullPath);

CacheValue newCacheValue = new CacheValue();

@@ -21,8 +21,12 @@ import java.io.Closeable;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.solr.common.SolrException;
import org.apache.solr.core.CachingDirectoryFactory.CloseListener;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Provides access to a Directory implementation. You must release every

@@ -31,6 +35,8 @@ import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public abstract class DirectoryFactory implements NamedListInitializedPlugin,
Closeable {

private static final Logger log = LoggerFactory.getLogger(DirectoryFactory.class.getName());

/**
* Indicates a Directory will no longer be used, and when it's ref count
* hits 0, it can be closed. On shutdown all directories will be closed

@@ -65,10 +71,33 @@ public abstract class DirectoryFactory implements NamedListInitializedPlugin,
*/
public abstract boolean exists(String path);

/**
* Removes the Directory's persistent storage.
* For example: A file system impl may remove the
* on disk directory.
* @throws IOException If there is a low-level I/O error.
*
*/
public abstract void remove(Directory dir) throws IOException;

/**
* Override for more efficient moves.
*
* @throws IOException If there is a low-level I/O error.
*/
public void move(Directory fromDir, Directory toDir, String fileName) throws IOException {
fromDir.copy(toDir, fileName, fileName, IOContext.DEFAULT);
fromDir.deleteFile(fileName);
}

/**
* Returns the Directory for a given path, using the specified rawLockType.
* Will return the same Directory instance for the same path.
*
* Note: sometimes you might pass null for the rawLockType when
* you know the Directory exists and the rawLockType is already
* in use.
*
* @throws IOException If there is a low-level I/O error.
*/
public abstract Directory get(String path, String rawLockType)

@@ -101,4 +130,58 @@ public abstract class DirectoryFactory implements NamedListInitializedPlugin,
*/
public abstract void release(Directory directory) throws IOException;


/**
* Normalize a given path.
*
* @param path to normalize
* @return normalized path
* @throws IOException on io error
*/
public String normalize(String path) throws IOException {
return path;
}

public static long sizeOfDirectory(Directory directory) throws IOException {
final String[] files = directory.listAll();
long size = 0;

for (final String file : files) {
size += sizeOf(directory, file);
if (size < 0) {
break;
}
}

return size;
}

public static long sizeOf(Directory directory, String file) throws IOException {
if (!directory.fileExists(file)) {
throw new IllegalArgumentException(file + " does not exist");
}

return directory.fileLength(file);
}

/**
* Delete the files in the Directory
*/
public static boolean empty(Directory dir) {
boolean isSuccess = true;
String contents[];
try {
contents = dir.listAll();
if (contents != null) {
for (String file : contents) {
dir.deleteFile(file);
}
}
} catch (IOException e) {
SolrException.log(log, "Error deleting files from Directory", e);
isSuccess = false;
}
return isSuccess;
}

}

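The static helpers added above, sizeOfDirectory(), sizeOf(), and empty(), deliberately take a Lucene Directory rather than a java.io.File, so callers such as ReplicationHandler.getIndexSize() (changed later in this patch) keep working when the index does not live on a local file system. A self-contained sketch against a RAMDirectory:

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.solr.core.DirectoryFactory;

public class DirectoryFactoryHelpersDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT);
    out.writeBytes(new byte[128], 128);
    out.close();

    // sizeOfDirectory sums fileLength over listAll(); no java.io.File needed
    System.out.println(DirectoryFactory.sizeOfDirectory(dir)); // 128
    // empty() deletes every file, logging rather than throwing on IO errors
    System.out.println(DirectoryFactory.empty(dir));           // true
    System.out.println(dir.listAll().length);                  // 0
    dir.close();
  }
}
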
@@ -0,0 +1,56 @@
package org.apache.solr.core;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import java.io.File;
import java.io.IOException;

import org.apache.lucene.store.Directory;

/**
* Directory provider for implementations that do not persist over reboots.
*
*/
public abstract class EphemeralDirectoryFactory extends CachingDirectoryFactory {

@Override
public boolean exists(String path) {
String fullPath = new File(path).getAbsolutePath();
synchronized (this) {
CacheValue cacheValue = byPathCache.get(fullPath);
Directory directory = null;
if (cacheValue != null) {
directory = cacheValue.directory;
}
if (directory == null) {
return false;
} else {
return true;
}
}
}

@Override
public void remove(Directory dir) throws IOException {
// ram dir does not persist its dir anywhere
}

@Override
public String normalize(String path) throws IOException {
return path;
}
}

@@ -39,7 +39,7 @@ import java.io.IOException;
* </ul>
*
**/
public class MMapDirectoryFactory extends CachingDirectoryFactory {
public class MMapDirectoryFactory extends StandardDirectoryFactory {
private transient static Logger log = LoggerFactory.getLogger(MMapDirectoryFactory.class);
boolean unmapHack;
private int maxChunk;

@@ -27,11 +27,11 @@ import java.io.IOException;
* Factory to instantiate {@link org.apache.lucene.store.NIOFSDirectory}
*
**/
public class NIOFSDirectoryFactory extends CachingDirectoryFactory {
public class NIOFSDirectoryFactory extends StandardDirectoryFactory {

@Override
protected Directory create(String path) throws IOException {

return new NIOFSDirectory(new File(path));
}

}

@@ -17,7 +17,6 @@

package org.apache.solr.core;

import java.io.File;
import java.io.IOException;

import org.apache.lucene.store.Directory;

@@ -26,28 +25,11 @@ import org.apache.lucene.store.RAMDirectory;
/**
* Factory to instantiate {@link org.apache.lucene.store.RAMDirectory}
*/
public class RAMDirectoryFactory extends StandardDirectoryFactory {
public class RAMDirectoryFactory extends EphemeralDirectoryFactory {

@Override
protected Directory create(String path) throws IOException {
return new RAMDirectory();
}

@Override
public boolean exists(String path) {
String fullPath = new File(path).getAbsolutePath();
synchronized (this) {
CacheValue cacheValue = byPathCache.get(fullPath);
Directory directory = null;
if (cacheValue != null) {
directory = cacheValue.directory;
}
if (directory == null) {
return false;
} else {
return true;
}
}
}

}

@@ -27,10 +27,11 @@ import java.io.IOException;
* Factory to instantiate {@link org.apache.lucene.store.SimpleFSDirectory}
*
**/
public class SimpleFSDirectoryFactory extends CachingDirectoryFactory {
public class SimpleFSDirectoryFactory extends StandardDirectoryFactory {

@Override
protected Directory create(String path) throws IOException {
return new SimpleFSDirectory(new File(path));
}

}

@@ -24,6 +24,8 @@ import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.common.SolrException;

@@ -84,8 +86,9 @@ import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;

import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;

@@ -214,13 +217,8 @@ public final class SolrCore implements SolrInfoMBean {
return dataDir;
}

public String getIndexDir() {
synchronized (searcherLock) {
if (_searcher == null)
return dataDir + "index/";
SolrIndexSearcher searcher = _searcher.get();
return searcher.getIndexDir() == null ? dataDir + "index/" : searcher.getIndexDir();
}
public String getIndexDir() {
return dataDir + "index/";
}

@@ -234,23 +232,55 @@ public final class SolrCore implements SolrInfoMBean {
*/
public String getNewIndexDir() {
String result = dataDir + "index/";
File propsFile = new File(dataDir + "index.properties");
if (propsFile.exists()) {
Properties p = new Properties();
InputStream is = null;
try {
is = new FileInputStream(propsFile);
p.load(is);
} catch (IOException e) {
/*no op*/
} finally {
IOUtils.closeQuietly(is);
Properties p = new Properties();
Directory dir = null;
try {
dir = getDirectoryFactory().get(getDataDir(), null);
if (dir.fileExists("index.properties")){
final IndexInput input = dir.openInput("index.properties", IOContext.DEFAULT);

final InputStream is = new InputStream() {

@Override
public int read() throws IOException {
byte next;
try {
next = input.readByte();
} catch (EOFException e) {
return -1;
}
return next;
}

@Override
public void close() throws IOException {
super.close();
input.close();
}
};

try {
p.load(is);

String s = p.getProperty("index");
if (s != null && s.trim().length() > 0) {
result = dataDir + s;
}

} catch (Exception e) {
log.error("Unable to load index.properties", e);
} finally {
IOUtils.closeQuietly(is);
}
}
String s = p.getProperty("index");
if (s != null && s.trim().length() > 0) {
File tmp = new File(dataDir + s);
if (tmp.exists() && tmp.isDirectory())
result = dataDir + s;
} catch (IOException e) {
SolrException.log(log, "", e);
} finally {

try {
getDirectoryFactory().release(dir);
} catch (IOException e) {
SolrException.log(log, "", e);
}
}
if (!result.equals(lastNewIndexDir)) {

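The getNewIndexDir() rewrite above stops assuming index.properties is a local file and instead reads it through the Directory API, adapting an IndexInput to a java.io.InputStream one byte at a time. The same adapter in isolation (a sketch, not the patch verbatim; note the & 0xFF mask, which keeps a 0xFF byte from being confused with the -1 end-of-stream marker):

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

public class DirectoryProperties {
  // Expose an IndexInput as an InputStream so java.util.Properties can parse it.
  static Properties load(Directory dir, String name) throws IOException {
    final IndexInput input = dir.openInput(name, IOContext.DEFAULT);
    InputStream is = new InputStream() {
      @Override
      public int read() throws IOException {
        try {
          return input.readByte() & 0xFF; // mask so byte 0xFF is not misread as EOF
        } catch (EOFException e) {
          return -1; // InputStream contract: -1 signals end of stream
        }
      }
      @Override
      public void close() throws IOException {
        super.close();
        input.close();
      }
    };
    Properties p = new Properties();
    try {
      p.load(is);
    } finally {
      is.close();
    }
    return p;
  }
}
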
@@ -365,6 +395,11 @@ public final class SolrCore implements SolrInfoMBean {
return responseWriters.put(name, responseWriter);
}

public SolrCore reload(SolrCore prev) throws IOException,
ParserConfigurationException, SAXException {
return reload(prev.getResourceLoader(), prev);
}

public SolrCore reload(SolrResourceLoader resourceLoader, SolrCore prev) throws IOException,
ParserConfigurationException, SAXException {
// TODO - what if indexwriter settings have changed

@@ -379,6 +414,7 @@ public final class SolrCore implements SolrInfoMBean {

SolrCore core = new SolrCore(getName(), getDataDir(), config,
schema, coreDescriptor, updateHandler, prev);
core.solrDelPolicy = this.solrDelPolicy;
return core;
}

@@ -393,9 +429,11 @@ public final class SolrCore implements SolrInfoMBean {
DirectoryFactory dirFactory;
PluginInfo info = solrConfig.getPluginInfo(DirectoryFactory.class.getName());
if (info != null) {
log.info(info.className);
dirFactory = getResourceLoader().newInstance(info.className, DirectoryFactory.class);
dirFactory.init(info.initArgs);
} else {
log.info("solr.NRTCachingDirectoryFactory");
dirFactory = new NRTCachingDirectoryFactory();
}
// And set it

@@ -417,8 +455,8 @@ public final class SolrCore implements SolrInfoMBean {
// protect via synchronized(SolrCore.class)
private static Set<String> dirs = new HashSet<String>();

void initIndex(boolean reload) {
try {
void initIndex(boolean reload) throws IOException {

String indexDir = getNewIndexDir();
boolean indexExists = getDirectoryFactory().exists(indexDir);
boolean firstTime;

@@ -430,19 +468,28 @@ public final class SolrCore implements SolrInfoMBean {
initIndexReaderFactory();

if (indexExists && firstTime && !reload) {
// to remove locks, the directory must already exist... so we create it
// if it didn't exist already...
Directory dir = directoryFactory.get(indexDir, getSolrConfig().indexConfig.lockType);
if (dir != null) {

Directory dir = directoryFactory.get(indexDir,
getSolrConfig().indexConfig.lockType);
try {
if (IndexWriter.isLocked(dir)) {
if (removeLocks) {
log.warn(logid + "WARNING: Solr index directory '{}' is locked. Unlocking...", indexDir);
log.warn(
logid
+ "WARNING: Solr index directory '{}' is locked. Unlocking...",
indexDir);
IndexWriter.unlock(dir);
} else {
log.error(logid + "Solr index directory '{}' is locked. Throwing exception", indexDir);
throw new LockObtainFailedException("Index locked for write for core " + name);
log.error(logid
+ "Solr index directory '{}' is locked. Throwing exception",
indexDir);
throw new LockObtainFailedException(
"Index locked for write for core " + name);
}

directoryFactory.release(dir);
}
} finally {
directoryFactory.release(dir);
}
}

@@ -456,9 +503,7 @@ public final class SolrCore implements SolrInfoMBean {
writer.close();
}

} catch (IOException e) {
throw new RuntimeException(e);
}

}

/** Creates an instance by trying a constructor that accepts a SolrCore before

@@ -636,42 +681,45 @@ public final class SolrCore implements SolrInfoMBean {

booleanQueryMaxClauseCount();

initListeners();

initDeletionPolicy();

this.codec= initCodec(solrConfig, schema);

if (updateHandler == null) {
initDirectoryFactory();
solrCoreState = new DefaultSolrCoreState(getDirectoryFactory());
} else {
solrCoreState = updateHandler.getSolrCoreState();
directoryFactory = solrCoreState.getDirectoryFactory();
this.isReloaded = true;
}

initIndex(prev != null);

initWriters();
initQParsers();
initValueSourceParsers();
initTransformerFactories();

this.searchComponents = Collections.unmodifiableMap(loadSearchComponents());

// Processors initialized before the handlers
updateProcessorChains = loadUpdateProcessorChains();
reqHandlers = new RequestHandlers(this);
reqHandlers.initHandlersFromConfig( solrConfig );


// Handle things that should eventually go away
initDeprecatedSupport();

final CountDownLatch latch = new CountDownLatch(1);

try {

initListeners();

if (updateHandler == null) {
initDeletionPolicy();
}

this.codec = initCodec(solrConfig, schema);

if (updateHandler == null) {
initDirectoryFactory();
solrCoreState = new DefaultSolrCoreState(getDirectoryFactory());
} else {
solrCoreState = updateHandler.getSolrCoreState();
directoryFactory = solrCoreState.getDirectoryFactory();
this.isReloaded = true;
}

initIndex(prev != null);

initWriters();
initQParsers();
initValueSourceParsers();
initTransformerFactories();

this.searchComponents = Collections
.unmodifiableMap(loadSearchComponents());

// Processors initialized before the handlers
updateProcessorChains = loadUpdateProcessorChains();
reqHandlers = new RequestHandlers(this);
reqHandlers.initHandlersFromConfig(solrConfig);

// Handle things that should eventually go away
initDeprecatedSupport();

// cause the executor to stall so firstSearcher events won't fire
// until after inform() has been called for all components.
// searchExecutor must be single-threaded for this to work

@@ -681,7 +729,7 @@ public final class SolrCore implements SolrInfoMBean {
return null;
}
});


// use the (old) writer to open the first searcher
RefCounted<IndexWriter> iwRef = null;
if (prev != null) {

@@ -696,20 +744,22 @@ public final class SolrCore implements SolrInfoMBean {
};
}
}

// Open the searcher *before* the update handler so we don't end up opening

// Open the searcher *before* the update handler so we don't end up
// opening
// one in the middle.
// With lockless commits in Lucene now, this probably shouldn't be an issue anymore

// With lockless commits in Lucene now, this probably shouldn't be an
// issue anymore

try {
getSearcher(false,false,null,true);
getSearcher(false, false, null, true);
} finally {
newReaderCreator = null;
if (iwRef != null) iwRef.decref();
}


String updateHandlerClass = solrConfig.getUpdateHandlerInfo().className;


if (updateHandler == null) {
this.updateHandler = createUpdateHandler(updateHandlerClass == null ? DirectUpdateHandler2.class
.getName() : updateHandlerClass);

@@ -719,10 +769,10 @@ public final class SolrCore implements SolrInfoMBean {
: updateHandlerClass, updateHandler);
}
infoRegistry.put("updateHandler", this.updateHandler);


// Finally tell anyone who wants to know
resourceLoader.inform( resourceLoader );
resourceLoader.inform( this ); // last call before the latch is released.
resourceLoader.inform(resourceLoader);
resourceLoader.inform(this); // last call before the latch is released.
} catch (Throwable e) {
latch.countDown();//release the latch, otherwise we block trying to do the close. This should be fine, since counting down on a latch of 0 is still fine
//close down the searcher and any other resources, if it exists, as this is not recoverable

@@ -911,11 +961,14 @@ public final class SolrCore implements SolrInfoMBean {
SolrException.log(log,e);
}


if (updateHandler instanceof IndexWriterCloser) {
decrefSolrCoreState((IndexWriterCloser)updateHandler);
} else {
decrefSolrCoreState(null);
try {
if (updateHandler instanceof IndexWriterCloser) {
decrefSolrCoreState((IndexWriterCloser) updateHandler);
} else {
decrefSolrCoreState(null);
}
} catch (Throwable e) {
SolrException.log(log, e);
}

try {

@@ -948,12 +1001,14 @@ public final class SolrCore implements SolrInfoMBean {
SolrException.log(log,e);
}

synchronized (solrCoreState) {
if (solrCoreStateRefCnt == 0) {
try {
directoryFactory.close();
} catch (Throwable t) {
SolrException.log(log, t);
if (solrCoreState != null) { // bad startup case
synchronized (solrCoreState) {
if (solrCoreStateRefCnt == 0) {
try {
directoryFactory.close();
} catch (Throwable t) {
SolrException.log(log, t);
}
}
}
}

@@ -1271,13 +1326,13 @@ public final class SolrCore implements SolrInfoMBean {
openSearcherLock.lock();
try {
String newIndexDir = getNewIndexDir();
File indexDirFile = null;
File newIndexDirFile = null;
String indexDirFile = null;
String newIndexDirFile = null;

// if it's not a normal near-realtime update, check that paths haven't changed.
if (!nrt) {
indexDirFile = new File(getIndexDir()).getCanonicalFile();
newIndexDirFile = new File(newIndexDir).getCanonicalFile();
indexDirFile = getDirectoryFactory().normalize(getIndexDir());
newIndexDirFile = getDirectoryFactory().normalize(newIndexDir);
}

synchronized (searcherLock) {

@@ -1305,6 +1360,7 @@ public final class SolrCore implements SolrInfoMBean {
} else {
// verbose("start reopen without writer, reader=", currentReader);
newReader = DirectoryReader.openIfChanged(currentReader);

// verbose("reopen result", newReader);
}

@@ -132,7 +132,11 @@ public class SolrDeletionPolicy implements IndexDeletionPolicy, NamedListInitial
synchronized (this) {
long maxCommitAgeTimeStamp = -1L;
IndexCommit newest = commits.get(commits.size() - 1);
log.info("newest commit = " + newest.getGeneration());
try {
log.info("newest commit = " + newest.getGeneration() + newest.getFileNames().toString());
} catch (IOException e1) {
throw new RuntimeException();
}

int singleSegKept = (newest.getSegmentCount() == 1) ? 1 : 0;
int totalKept = 1;

@@ -19,6 +19,7 @@ package org.apache.solr.core;
import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

@@ -26,6 +27,9 @@ import org.apache.lucene.store.FSDirectory;
* Directory provider which mimics original Solr
* {@link org.apache.lucene.store.FSDirectory} based behavior.
*
* File based DirectoryFactory implementations generally extend
* this class.
*
*/
public class StandardDirectoryFactory extends CachingDirectoryFactory {

@@ -33,4 +37,41 @@ public class StandardDirectoryFactory extends CachingDirectoryFactory {
protected Directory create(String path) throws IOException {
return FSDirectory.open(new File(path));
}

@Override
public String normalize(String path) throws IOException {
return new File(path).getCanonicalPath();
}

@Override
public void remove(Directory dir) throws IOException {
CacheValue val = byDirectoryCache.get(dir);
if (val == null) {
throw new NullPointerException("Unknown directory " + dir);
}
File dirFile = new File(val.path);
FileUtils.deleteDirectory(dirFile);
}

/**
* Override for more efficient moves.
*
* @throws IOException
* If there is a low-level I/O error.
*/
public void move(Directory fromDir, Directory toDir, String fileName)
throws IOException {
if (fromDir instanceof FSDirectory && toDir instanceof FSDirectory) {
File dir1 = ((FSDirectory) fromDir).getDirectory();
File dir2 = ((FSDirectory) toDir).getDirectory();
File indexFileInTmpDir = new File(dir1, fileName);
File indexFileInIndex = new File(dir2, fileName);
boolean success = indexFileInTmpDir.renameTo(indexFileInIndex);
if (success) {
return;
}
}

super.move(fromDir, toDir, fileName);
}
}

@@ -38,11 +38,13 @@ import java.util.zip.Adler32;
import java.util.zip.Checksum;
import java.util.zip.DeflaterOutputStream;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;

@@ -53,6 +55,7 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrDeletionPolicy;

@@ -204,9 +207,6 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
rsp.add(STATUS,ERR_STATUS);
rsp.add("message","No slave configured");
}
} else if (command.equals(CMD_FILE_CHECKSUM)) {
// this command is not used by anyone
getFileChecksum(solrParams, rsp);
} else if (command.equals(CMD_SHOW_COMMITS)) {
rsp.add(CMD_SHOW_COMMITS, getCommits());
} else if (command.equals(CMD_DETAILS)) {

@@ -239,30 +239,6 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
return l;
}

/**
* Gets the checksum of a file
*/
private void getFileChecksum(SolrParams solrParams, SolrQueryResponse rsp) {
Checksum checksum = new Adler32();
File dir = new File(core.getIndexDir());
rsp.add(CHECKSUM, getCheckSums(solrParams.getParams(FILE), dir, checksum));
dir = new File(core.getResourceLoader().getConfigDir());
rsp.add(CONF_CHECKSUM, getCheckSums(solrParams.getParams(CONF_FILE_SHORT), dir, checksum));
}

private Map<String, Long> getCheckSums(String[] files, File dir, Checksum checksum) {
Map<String, Long> checksumMap = new HashMap<String, Long>();
if (files == null || files.length == 0)
return checksumMap;
for (String file : files) {
File f = new File(dir, file);
Long checkSumVal = getCheckSum(checksum, f);
if (checkSumVal != null)
checksumMap.put(file, checkSumVal);
}
return checksumMap;
}

static Long getCheckSum(Checksum checksum, File f) {
FileInputStream fis = null;
checksum.reset();

@@ -343,15 +319,22 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
}

/**
* This method adds an Object of FileStream to the resposnse . The FileStream implements a custom protocol which is
* This method adds an Object of FileStream to the response . The FileStream implements a custom protocol which is
* understood by SnapPuller.FileFetcher
*
* @see org.apache.solr.handler.SnapPuller.FileFetcher
* @see org.apache.solr.handler.SnapPuller.LocalFsFileFetcher
* @see org.apache.solr.handler.SnapPuller.DirectoryFileFetcher
*/
private void getFileStream(SolrParams solrParams, SolrQueryResponse rsp) {
ModifiableSolrParams rawParams = new ModifiableSolrParams(solrParams);
rawParams.set(CommonParams.WT, FILE_STREAM);
rsp.add(FILE_STREAM, new FileStream(solrParams));

String cfileName = solrParams.get(CONF_FILE_SHORT);
if (cfileName != null) {
rsp.add(FILE_STREAM, new LocalFsFileStream(solrParams));
} else {
rsp.add(FILE_STREAM, new DirectoryFileStream(solrParams));
}
}

@SuppressWarnings("unchecked")

@@ -372,21 +355,29 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
// reserve the indexcommit for sometime
core.getDeletionPolicy().setReserveDuration(gen, reserveCommitDuration);
List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
Directory dir = null;
try {
//get all the files in the commit
//use a set to workaround possible Lucene bug which returns same file name multiple times
// get all the files in the commit
// use a set to workaround possible Lucene bug which returns same file
// name multiple times
Collection<String> files = new HashSet<String>(commit.getFileNames());
for (String fileName : files) {
if(fileName.endsWith(".lock")) continue;
File file = new File(core.getIndexDir(), fileName);
Map<String, Object> fileMeta = getFileInfo(file);
result.add(fileMeta);
dir = core.getDirectoryFactory().get(core.getNewIndexDir(), null);
try {

for (String fileName : files) {
if (fileName.endsWith(".lock")) continue;
Map<String,Object> fileMeta = new HashMap<String,Object>();
fileMeta.put(NAME, fileName);
fileMeta.put(SIZE, dir.fileLength(fileName));
result.add(fileMeta);
}
} finally {
core.getDirectoryFactory().release(dir);
}
} catch (IOException e) {
rsp.add("status", "unable to get file names for given index generation");
rsp.add("exception", e);
LOG.warn("Unable to get file names for indexCommit generation: "
+ gen, e);
LOG.error("Unable to get file names for indexCommit generation: " + gen, e);
}
rsp.add(CMD_GET_FILE_LIST, result);
if (confFileNameAlias.size() < 1 || core.getCoreDescriptor().getCoreContainer().isZooKeeperAware())

@@ -444,7 +435,6 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
Map<String, Object> map = new HashMap<String, Object>();
map.put(NAME, name);
map.put(SIZE, size);
map.put(LAST_MODIFIED, lastmodified);
map.put(CHECKSUM, checksum);
return map;
}

@@ -474,18 +464,19 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
}

long getIndexSize() {
return FileUtils.sizeOfDirectory(new File(core.getIndexDir()));
}

/**
* Collects the details such as name, size ,lastModified of a file
*/
private Map<String, Object> getFileInfo(File file) {
Map<String, Object> fileMeta = new HashMap<String, Object>();
fileMeta.put(NAME, file.getName());
fileMeta.put(SIZE, file.length());
fileMeta.put(LAST_MODIFIED, file.lastModified());
return fileMeta;
Directory dir;
long size = 0;
try {
dir = core.getDirectoryFactory().get(core.getIndexDir(), null);
try {
size = DirectoryFactory.sizeOfDirectory(dir);
} finally {
core.getDirectoryFactory().release(dir);
}
} catch (IOException e) {
SolrException.log(LOG, "IO error while trying to get the size of the Directory", e);
}
return size;
}

@Override

@@ -885,7 +876,8 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
}

// reboot the writer on the new index
core.getUpdateHandler().newIndexWriter(true);
// TODO: perhaps this is no longer necessary then?
// core.getUpdateHandler().newIndexWriter(true);

} catch (IOException e) {
LOG.warn("Unable to get IndexCommit on startup", e);

@@ -936,7 +928,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
private void registerFileStreamResponseWriter() {
core.registerResponseWriter(FILE_STREAM, new BinaryQueryResponseWriter() {
public void write(OutputStream out, SolrQueryRequest request, SolrQueryResponse resp) throws IOException {
FileStream stream = (FileStream) resp.getValues().get(FILE_STREAM);
DirectoryFileStream stream = (DirectoryFileStream) resp.getValues().get(FILE_STREAM);
stream.write(out);
}

@@ -1009,19 +1001,113 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
};
}

private class FileStream {
private SolrParams params;
private class DirectoryFileStream {
protected SolrParams params;

private FastOutputStream fos;
protected FastOutputStream fos;

private Long indexGen;
private IndexDeletionPolicyWrapper delPolicy;
protected Long indexGen;
protected IndexDeletionPolicyWrapper delPolicy;

public FileStream(SolrParams solrParams) {
public DirectoryFileStream(SolrParams solrParams) {
params = solrParams;
delPolicy = core.getDeletionPolicy();
}

public void write(OutputStream out) throws IOException {
String fileName = params.get(FILE);
String cfileName = params.get(CONF_FILE_SHORT);
String sOffset = params.get(OFFSET);
String sLen = params.get(LEN);
String compress = params.get(COMPRESSION);
String sChecksum = params.get(CHECKSUM);
String sGen = params.get(GENERATION);
if (sGen != null) indexGen = Long.parseLong(sGen);
if (Boolean.parseBoolean(compress)) {
fos = new FastOutputStream(new DeflaterOutputStream(out));
} else {
fos = new FastOutputStream(out);
}

int packetsWritten = 0;
IndexInput in = null;
try {
long offset = -1;
int len = -1;
// check if checksum is requested
boolean useChecksum = Boolean.parseBoolean(sChecksum);
if (sOffset != null) offset = Long.parseLong(sOffset);
if (sLen != null) len = Integer.parseInt(sLen);
if (fileName == null && cfileName == null) {
// no filename do nothing
writeNothing();
}

RefCounted<SolrIndexSearcher> sref = core.getSearcher();
Directory dir;
try {
SolrIndexSearcher searcher = sref.get();
dir = searcher.getIndexReader().directory();
} finally {
sref.decref();
}
in = dir.openInput(fileName, IOContext.READONCE);
// if offset is mentioned move the pointer to that point
if (offset != -1) in.seek(offset);
byte[] buf = new byte[(len == -1 || len > PACKET_SZ) ? PACKET_SZ : len];
Checksum checksum = null;
if (useChecksum) checksum = new Adler32();

long filelen = dir.fileLength(fileName);
while (true) {
offset = offset == -1 ? 0 : offset;
int read = (int) Math.min(buf.length, filelen - offset);
in.readBytes(buf, offset == -1 ? 0 : (int) offset, read);

fos.writeInt((int) read);
if (useChecksum) {
checksum.reset();
checksum.update(buf, 0, read);
fos.writeLong(checksum.getValue());
}
fos.write(buf, 0, read);
fos.flush();
if (indexGen != null && (packetsWritten % 5 == 0)) {
// after every 5 packets reserve the commitpoint for some time
delPolicy.setReserveDuration(indexGen, reserveCommitDuration);
}
packetsWritten++;
if (read != buf.length) {
writeNothing();
fos.close();
break;
}
}
} catch (IOException e) {
LOG.warn("Exception while writing response for params: " + params, e);
} finally {
if (in != null) {
in.close();
}
}
}


/**
* Used to write a marker for EOF
*/
protected void writeNothing() throws IOException {
fos.writeInt(0);
fos.flush();
}
}

private class LocalFsFileStream extends DirectoryFileStream {

public LocalFsFileStream(SolrParams solrParams) {
super(solrParams);
}

public void write(OutputStream out) throws IOException {
String fileName = params.get(FILE);
String cfileName = params.get(CONF_FILE_SHORT);

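DirectoryFileStream.write() above frames the file as length-prefixed packets: a 4-byte length, an optional 8-byte Adler32 value when a checksum is requested, then the payload, with a zero-length packet marking end of stream. A hedged sketch of the matching read side (it assumes FastOutputStream's writeInt/writeLong use the same big-endian encoding as java.io.DataOutput, which the FastInputStream used by SnapPuller follows):

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.Adler32;
import java.util.zip.Checksum;

public class PacketReader {
  // Framing: [int length][long checksum if requested][length bytes] ... [int 0] = EOF
  static long readAll(InputStream raw, boolean withChecksum) throws IOException {
    DataInputStream in = new DataInputStream(raw);
    Checksum checksum = new Adler32();
    long total = 0;
    while (true) {
      int len = in.readInt();
      if (len == 0) return total; // zero-length packet marks end of file
      long expected = withChecksum ? in.readLong() : 0L;
      byte[] buf = new byte[len];
      in.readFully(buf);
      if (withChecksum) {
        checksum.reset();
        checksum.update(buf, 0, len);
        if (checksum.getValue() != expected) {
          throw new IOException("checksum mismatch in packet");
        }
      }
      total += len;
    }
  }
}
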
@@ -1053,13 +1139,10 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
}

File file = null;
if (cfileName != null) {
//if if is a conf file read from config diectory
file = new File(core.getResourceLoader().getConfigDir(), cfileName);
} else {
//else read from the indexdirectory
file = new File(core.getIndexDir(), fileName);
}

//if if is a conf file read from config diectory
file = new File(core.getResourceLoader().getConfigDir(), cfileName);

if (file.exists() && file.canRead()) {
inputStream = new FileInputStream(file);
FileChannel channel = inputStream.getChannel();

@@ -1103,17 +1186,8 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
IOUtils.closeQuietly(inputStream);
}
}


/**
* Used to write a marker for EOF
*/
private void writeNothing() throws IOException {
fos.writeInt(0);
fos.flush();
}
}

}

public static final String MASTER_URL = "masterUrl";

public static final String STATUS = "status";

@@ -1132,8 +1206,6 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw

public static final String CMD_GET_FILE = "filecontent";

public static final String CMD_FILE_CHECKSUM = "filechecksum";

public static final String CMD_DISABLE_POLL = "disablepoll";

public static final String CMD_DISABLE_REPL = "disablereplication";

@@ -1158,8 +1230,6 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw

public static final String SIZE = "size";

public static final String LAST_MODIFIED = "lastmodified";

public static final String CONF_FILE_SHORT = "cf";

public static final String CHECKSUM = "checksum";

@@ -16,11 +16,67 @@
*/
package org.apache.solr.handler;

import static org.apache.solr.handler.ReplicationHandler.ALIAS;
import static org.apache.solr.handler.ReplicationHandler.CHECKSUM;
import static org.apache.solr.handler.ReplicationHandler.CMD_DETAILS;
import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE;
import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE_LIST;
import static org.apache.solr.handler.ReplicationHandler.CMD_INDEX_VERSION;
import static org.apache.solr.handler.ReplicationHandler.COMMAND;
import static org.apache.solr.handler.ReplicationHandler.COMPRESSION;
import static org.apache.solr.handler.ReplicationHandler.CONF_FILES;
import static org.apache.solr.handler.ReplicationHandler.CONF_FILE_SHORT;
import static org.apache.solr.handler.ReplicationHandler.EXTERNAL;
import static org.apache.solr.handler.ReplicationHandler.FILE;
import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM;
import static org.apache.solr.handler.ReplicationHandler.GENERATION;
import static org.apache.solr.handler.ReplicationHandler.INTERNAL;
import static org.apache.solr.handler.ReplicationHandler.MASTER_URL;
import static org.apache.solr.handler.ReplicationHandler.NAME;
import static org.apache.solr.handler.ReplicationHandler.OFFSET;
import static org.apache.solr.handler.ReplicationHandler.SIZE;

import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.Adler32;
import java.util.zip.Checksum;
import java.util.zip.InflaterInputStream;

import org.apache.commons.io.IOUtils;
import org.apache.http.client.HttpClient;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpClientUtil;

@@ -31,35 +87,22 @@ import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CachingDirectoryFactory.CloseListener;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
import static org.apache.solr.handler.ReplicationHandler.*;

import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.ReplicationHandler.FileInfo;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.Adler32;
import java.util.zip.Checksum;
import java.util.zip.InflaterInputStream;

/**
* <p/> Provides functionality of downloading changed index files as well as config files and a timer for scheduling fetches from the
* master. </p>

@@ -96,7 +139,9 @@ public class SnapPuller {

private volatile Map<String, Object> currentFile;

private volatile FileFetcher fileFetcher;
private volatile DirectoryFileFetcher dirFileFetcher;

private volatile LocalFsFileFetcher localFileFetcher;

private volatile ExecutorService fsyncService;

@@ -247,9 +292,12 @@ public class SnapPuller {
* @return true on success, false if slave is already in sync
* @throws IOException if an exception occurs
*/
boolean fetchLatestIndex(SolrCore core, boolean forceReplication) throws IOException, InterruptedException {
boolean fetchLatestIndex(final SolrCore core, boolean forceReplication) throws IOException, InterruptedException {
successfulInstall = false;
replicationStartTime = System.currentTimeMillis();
Directory tmpIndexDir = null;
Directory indexDir = null;
boolean deleteTmpIdxDir = true;
try {
//get the current 'replicateable' index version in the master
NamedList response = null;

@@ -318,28 +366,34 @@ public class SnapPuller {
// if the generateion of master is older than that of the slave , it means they are not compatible to be copied
// then a new index direcory to be created and all the files need to be copied
boolean isFullCopyNeeded = IndexDeletionPolicyWrapper.getCommitTimestamp(commit) >= latestVersion || forceReplication;
File tmpIndexDir = createTempindexDir(core);
if (isIndexStale()) {
isFullCopyNeeded = true;
}
LOG.info("Starting download to " + tmpIndexDir + " fullCopy=" + isFullCopyNeeded);
successfulInstall = false;
boolean deleteTmpIdxDir = true;

String tmpIdxDirName = "index." + new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date());
String tmpIndex = createTempindexDir(core, tmpIdxDirName);

tmpIndexDir = core.getDirectoryFactory().get(tmpIndex, null);

// make sure it's the newest known index dir...
final File indexDir = new File(core.getNewIndexDir());
indexDir = core.getDirectoryFactory().get(core.getNewIndexDir(), null);
Directory oldDirectory = null;

try {

if (isIndexStale(indexDir)) {
isFullCopyNeeded = true;
}
LOG.info("Starting download to " + tmpIndexDir + " fullCopy=" + isFullCopyNeeded);
successfulInstall = false;

downloadIndexFiles(isFullCopyNeeded, tmpIndexDir, latestGeneration);
LOG.info("Total time taken for download : " + ((System.currentTimeMillis() - replicationStartTime) / 1000) + " secs");
Collection<Map<String, Object>> modifiedConfFiles = getModifiedConfFiles(confFilesToDownload);
if (!modifiedConfFiles.isEmpty()) {
downloadConfFiles(confFilesToDownload, latestGeneration);
if (isFullCopyNeeded) {
successfulInstall = modifyIndexProps(tmpIndexDir.getName());
deleteTmpIdxDir = false;
successfulInstall = modifyIndexProps(tmpIdxDirName);
deleteTmpIdxDir = false;
} else {
successfulInstall = copyIndexFiles(tmpIndexDir, indexDir);
successfulInstall = moveIndexFiles(tmpIndexDir, indexDir);
}
if (successfulInstall) {
LOG.info("Configuration files are modified, core will be reloaded");

@@ -349,7 +403,7 @@ public class SnapPuller {
} else {
terminateAndWaitFsyncService();
if (isFullCopyNeeded) {
successfulInstall = modifyIndexProps(tmpIndexDir.getName());
successfulInstall = modifyIndexProps(tmpIdxDirName);
deleteTmpIdxDir = false;
RefCounted<IndexWriter> iw = core.getUpdateHandler().getSolrCoreState().getIndexWriter(core);
try {

@@ -358,7 +412,7 @@ public class SnapPuller {
iw.decref();
}
} else {
successfulInstall = copyIndexFiles(tmpIndexDir, indexDir);
successfulInstall = moveIndexFiles(tmpIndexDir, indexDir);
}
if (successfulInstall) {
logReplicationTimeAndConfFiles(modifiedConfFiles, successfulInstall);

@@ -367,17 +421,28 @@ public class SnapPuller {

if (isFullCopyNeeded) {
// we have to do this before commit
final Directory freezeIndexDir = indexDir;
core.getDirectoryFactory().addCloseListener(oldDirectory, new CloseListener(){

@Override
public void onClose() {
LOG.info("removing old index directory " + indexDir);
delTree(indexDir);
public void preClose() {
LOG.info("removing old index files " + freezeIndexDir);
DirectoryFactory.empty(freezeIndexDir);
}

@Override
public void postClose() {
LOG.info("removing old index directory " + freezeIndexDir);
try {
core.getDirectoryFactory().remove(freezeIndexDir);
} catch (IOException e) {
SolrException.log(LOG, "Error removing directory " + freezeIndexDir, e);
}
}

});
}


if (successfulInstall) {
if (isFullCopyNeeded) {
// let the system know we are changing dir's and the old one

@@ -400,21 +465,39 @@ public class SnapPuller {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Index fetch failed : ", e);
       } finally {
         if (deleteTmpIdxDir) {
-          LOG.info("removing temporary index download directory " + tmpIndexDir);
-          delTree(tmpIndexDir);
+          LOG.info("removing temporary index download directory files " + tmpIndexDir);
+          DirectoryFactory.empty(tmpIndexDir);
         }
       }
     } finally {
-      if (!successfulInstall) {
-        logReplicationTimeAndConfFiles(null, successfulInstall);
+      try {
+        if (!successfulInstall) {
+          logReplicationTimeAndConfFiles(null, successfulInstall);
+        }
+        filesToDownload = filesDownloaded = confFilesDownloaded = confFilesToDownload = null;
+        replicationStartTime = 0;
+        dirFileFetcher = null;
+        localFileFetcher = null;
+        if (fsyncService != null && !fsyncService.isShutdown()) fsyncService
+            .shutdownNow();
+        fsyncService = null;
+        stop = false;
+        fsyncException = null;
+      } finally {
+        if (tmpIndexDir != null) {
+          core.getDirectoryFactory().release(tmpIndexDir);
+        }
+        if (deleteTmpIdxDir && tmpIndexDir != null) {
+          try {
+            core.getDirectoryFactory().remove(tmpIndexDir);
+          } catch (IOException e) {
+            SolrException.log(LOG, "Error removing directory " + tmpIndexDir, e);
+          }
+        }
+        if (indexDir != null) {
+          core.getDirectoryFactory().release(indexDir);
+        }
       }
-      filesToDownload = filesDownloaded = confFilesDownloaded = confFilesToDownload = null;
-      replicationStartTime = 0;
-      fileFetcher = null;
-      if (fsyncService != null && !fsyncService.isShutdown()) fsyncService.shutdownNow();
-      fsyncService = null;
-      stop = false;
-      fsyncException = null;
     }
   }
 
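The reworked finally block shows the reference discipline the rest of this commit follows: every Directory handed out by DirectoryFactory.get() must be released exactly once, and remove() is a separate, explicit request that the factory can defer until all references are gone. As a hedged sketch (factory, path, and shouldDelete are stand-in names):

    Directory d = factory.get(path, null);   // acquires a reference
    try {
      // ... use d ...
    } finally {
      factory.release(d);                    // always return the reference
    }
    if (shouldDelete) {
      factory.remove(d);                     // deletion is requested, not immediate
    }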
@@ -535,7 +618,7 @@ public class SnapPuller {
     SolrQueryRequest req = new LocalSolrQueryRequest(solrCore,
         new ModifiableSolrParams());
     // reboot the writer on the new index and get a new searcher
-    solrCore.getUpdateHandler().newIndexWriter(isFullCopyNeeded);
+    solrCore.getUpdateHandler().newIndexWriter(isFullCopyNeeded, false);
 
     try {
       // first try to open an NRT searcher so that the new
@@ -567,11 +650,9 @@ public class SnapPuller {
   /**
    * All the files are copied to a temp dir first
    */
-  private File createTempindexDir(SolrCore core) {
-    String tmpIdxDirName = "index." + new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date());
+  private String createTempindexDir(SolrCore core, String tmpIdxDirName) {
     File tmpIdxDir = new File(core.getDataDir(), tmpIdxDirName);
     tmpIdxDir.mkdirs();
-    return tmpIdxDir;
+    return tmpIdxDir.toString();
   }
 
   private void reloadCore() {
@@ -599,9 +680,9 @@ public class SnapPuller {
     }
     for (Map<String, Object> file : confFilesToDownload) {
       String saveAs = (String) (file.get(ALIAS) == null ? file.get(NAME) : file.get(ALIAS));
-      fileFetcher = new FileFetcher(tmpconfDir, file, saveAs, true, latestGeneration);
+      localFileFetcher = new LocalFsFileFetcher(tmpconfDir, file, saveAs, true, latestGeneration);
       currentFile = file;
-      fileFetcher.fetchFile();
+      localFileFetcher.fetchFile();
       confFilesDownloaded.add(new HashMap<String, Object>(file));
     }
     // this is called before copying the files to the original conf dir
@@ -617,21 +698,29 @@ public class SnapPuller {
    * Download the index files. If a new index is needed, download all the files.
    *
    * @param downloadCompleteIndex is it a fresh index copy
-   * @param tmpIdxDir the directory to which files need to be downloadeed to
+   * @param tmpIndexDir the directory to which files need to be downloadeed to
    * @param latestGeneration the version number
    */
-  private void downloadIndexFiles(boolean downloadCompleteIndex, File tmpIdxDir, long latestGeneration) throws Exception {
+  private void downloadIndexFiles(boolean downloadCompleteIndex,
+      Directory tmpIndexDir, long latestGeneration) throws Exception {
     String indexDir = solrCore.getIndexDir();
-    for (Map<String, Object> file : filesToDownload) {
-      File localIndexFile = new File(indexDir, (String) file.get(NAME));
-      if (!localIndexFile.exists() || downloadCompleteIndex) {
-        fileFetcher = new FileFetcher(tmpIdxDir, file, (String) file.get(NAME), false, latestGeneration);
-        currentFile = file;
-        fileFetcher.fetchFile();
-        filesDownloaded.add(new HashMap<String, Object>(file));
-      } else {
-        LOG.info("Skipping download for " + localIndexFile);
+
+    // it's okay to use null for lock factory since we know this dir will exist
+    Directory dir = solrCore.getDirectoryFactory().get(indexDir, null);
+    try {
+      for (Map<String,Object> file : filesToDownload) {
+        if (!dir.fileExists((String) file.get(NAME)) || downloadCompleteIndex) {
+          dirFileFetcher = new DirectoryFileFetcher(tmpIndexDir, file,
+              (String) file.get(NAME), false, latestGeneration);
+          currentFile = file;
+          dirFileFetcher.fetchFile();
+          filesDownloaded.add(new HashMap<String,Object>(file));
+        } else {
+          LOG.info("Skipping download for " + file.get(NAME));
+        }
       }
+    } finally {
+      solrCore.getDirectoryFactory().release(dir);
     }
   }
 
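downloadIndexFiles now asks the Directory itself whether a file is present instead of probing the local filesystem, which is exactly what lets a non-FS DirectoryFactory take part in replication. The substitution, side by side (fileExists and fileLength are the Lucene 4.x Directory methods used above; indexDirPath and name are stand-ins):

    // before: only works when the index lives on the local filesystem
    boolean present = new File(indexDirPath, name).exists();

    // after: works for any Directory implementation
    boolean exists = dir.fileExists(name);
    long length = dir.fileLength(name);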
@@ -640,13 +729,12 @@ public class SnapPuller {
    * not compatible (stale).
    *
    * @return true if the index stale and we need to download a fresh copy, false otherwise.
+   * @throws IOException if low level io error
    */
-  private boolean isIndexStale() {
+  private boolean isIndexStale(Directory dir) throws IOException {
     for (Map<String, Object> file : filesToDownload) {
-      File localIndexFile = new File(solrCore.getIndexDir(), (String) file
-          .get(NAME));
-      if (localIndexFile.exists()
-          && localIndexFile.length() != (Long) file.get(SIZE)) {
+      if (dir.fileExists((String) file.get(NAME))
+          && dir.fileLength((String) file.get(NAME)) != (Long) file.get(SIZE)) {
         // file exists and size is different, therefore we must assume
         // corrupted index
         return true;
@@ -659,52 +747,31 @@ public class SnapPuller {
    * Copy a file by the File#renameTo() method. If it fails, it is considered a failure
    * <p/>
    */
-  private boolean copyAFile(File tmpIdxDir, File indexDir, String fname, List<String> copiedfiles) {
-    File indexFileInTmpDir = new File(tmpIdxDir, fname);
-    File indexFileInIndex = new File(indexDir, fname);
-    boolean success = indexFileInTmpDir.renameTo(indexFileInIndex);
-    if(!success){
-      try {
-        LOG.error("Unable to move index file from: " + indexFileInTmpDir
-            + " to: " + indexFileInIndex + " Trying to do a copy");
-        FileUtils.copyFile(indexFileInTmpDir,indexFileInIndex);
-        success = true;
-      } catch (FileNotFoundException e) {
-        if (!indexDir.exists()) {
-          File parent = indexDir.getParentFile();
-          String[] children = null;
-          if (parent != null) {
-            children = parent.list();
-          }
-          LOG.error("The index directory does not exist: " + indexDir.getAbsolutePath()
-              + " dirs found: " + (children == null ? "none could be found" : Arrays.asList(children)));
-        }
-        LOG.error("Unable to copy index file from: " + indexFileInTmpDir
-            + " to: " + indexFileInIndex , e);
-      } catch (IOException e) {
-        LOG.error("Unable to copy index file from: " + indexFileInTmpDir
-            + " to: " + indexFileInIndex , e);
-      }
-    }
-
-    if (!success) {
-      for (String f : copiedfiles) {
-        File indexFile = new File(indexDir, f);
-        if (indexFile.exists())
-          indexFile.delete();
-      }
-      delTree(tmpIdxDir);
-      return false;
-    }
-    return true;
+  private boolean moveAFile(Directory tmpIdxDir, Directory indexDir, String fname, List<String> copiedfiles) {
+    boolean success = false;
+    try {
+      if (indexDir.fileExists(fname)) {
+        return true;
+      }
+    } catch (IOException e) {
+      SolrException.log(LOG, "could not check if a file exists", e);
+      return false;
+    }
+    try {
+      solrCore.getDirectoryFactory().move(tmpIdxDir, indexDir, fname);
+      success = true;
+    } catch (IOException e) {
+      SolrException.log(LOG, "Could not move file", e);
+    }
+    return success;
   }
 
   /**
    * Copy all index files from the temp index dir to the actual index. The segments_N file is copied last.
    */
-  private boolean copyIndexFiles(File tmpIdxDir, File indexDir) {
+  private boolean moveIndexFiles(Directory tmpIdxDir, Directory indexDir) {
     String segmentsFile = null;
-    List<String> copiedfiles = new ArrayList<String>();
+    List<String> movedfiles = new ArrayList<String>();
     for (Map<String, Object> f : filesDownloaded) {
       String fname = (String) f.get(NAME);
       // the segments file must be copied last
@@ -716,12 +783,12 @@ public class SnapPuller {
         segmentsFile = fname;
         continue;
       }
-      if (!copyAFile(tmpIdxDir, indexDir, fname, copiedfiles)) return false;
-      copiedfiles.add(fname);
+      if (!moveAFile(tmpIdxDir, indexDir, fname, movedfiles)) return false;
+      movedfiles.add(fname);
     }
     //copy the segments file last
     if (segmentsFile != null) {
-      if (!copyAFile(tmpIdxDir, indexDir, segmentsFile, copiedfiles)) return false;
+      if (!moveAFile(tmpIdxDir, indexDir, segmentsFile, movedfiles)) return false;
     }
     return true;
   }
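moveAFile now delegates the actual transfer to DirectoryFactory.move(fromDir, toDir, fileName), whose body is not part of this hunk. A plausible default, shown here as an assumption rather than the committed code, is a copy-then-delete that any pair of Directory implementations can satisfy, which also explains why the caller only has a single IOException to handle:

    // Hypothetical default for DirectoryFactory.move (an assumption, not in this diff):
    public void move(Directory fromDir, Directory toDir, String fileName) throws IOException {
      fromDir.copy(toDir, fileName, fileName, IOContext.DEFAULT);  // Lucene 4.x bulk copy
      fromDir.deleteFile(fileName);
    }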
@@ -759,31 +826,84 @@ public class SnapPuller {
    */
   private boolean modifyIndexProps(String tmpIdxDirName) {
     LOG.info("New index installed. Updating index properties... index="+tmpIdxDirName);
-    File idxprops = new File(solrCore.getDataDir() + "index.properties");
     Properties p = new Properties();
-    if (idxprops.exists()) {
-      InputStream is = null;
-      try {
-        is = new FileInputStream(idxprops);
-        p.load(is);
-      } catch (Exception e) {
-        LOG.error("Unable to load index.properties", e);
-      } finally {
-        IOUtils.closeQuietly(is);
-      }
-    }
-    p.put("index", tmpIdxDirName);
-    FileOutputStream os = null;
+    Directory dir = null;
     try {
-      os = new FileOutputStream(idxprops);
-      p.store(os, "index properties");
-    } catch (Exception e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-          "Unable to write index.properties", e);
+      dir = solrCore.getDirectoryFactory().get(solrCore.getDataDir(), null);
+      if (dir.fileExists("index.properties")){
+        final IndexInput input = dir.openInput("index.properties", IOContext.DEFAULT);
+
+        final InputStream is = new InputStream() {
+
+          @Override
+          public int read() throws IOException {
+            byte next;
+            try {
+              next = input.readByte();
+            } catch (EOFException e) {
+              return -1;
+            }
+            return next;
+          }
+
+          @Override
+          public void close() throws IOException {
+            super.close();
+            input.close();
+          }
+        };
+
+        try {
+          p.load(is);
+        } catch (Exception e) {
+          LOG.error("Unable to load index.properties", e);
+        } finally {
+          IOUtils.closeQuietly(is);
+        }
+      }
+      try {
+        dir.deleteFile("index.properties");
+      } catch (IOException e) {
+        // no problem
+      }
+      final IndexOutput out = dir.createOutput("index.properties", IOContext.DEFAULT);
+      p.put("index", tmpIdxDirName);
+      OutputStream os = null;
+      try {
+        os = new OutputStream() {
+
+          @Override
+          public void write(int b) throws IOException {
+            out.writeByte((byte) b);
+          }
+
+          @Override
+          public void close() throws IOException {
+            super.close();
+            out.close();
+          }
+        };
+        p.store(os, "index properties");
+      } catch (Exception e) {
+        LOG.error("Unable to load index.properties");
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+            "Unable to write index.properties", e);
+      } finally {
+        IOUtils.closeQuietly(os);
+      }
+      return true;
+
+    } catch (IOException e1) {
+      throw new RuntimeException(e1);
     } finally {
-      IOUtils.closeQuietly(os);
+      if (dir != null) {
+        try {
+          solrCore.getDirectoryFactory().release(dir);
+        } catch (IOException e) {
+          SolrException.log(LOG, "", e);
+        }
+      }
     }
-    return true;
-
   }
 
   private final Map<String, FileInfo> confFileInfoCache = new HashMap<String, FileInfo>();
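modifyIndexProps now reads and writes index.properties through the Directory by adapting IndexInput and IndexOutput to the java.io stream interfaces, as the anonymous classes above do inline. One detail worth noting: the inline adapter returns the signed byte from readByte() directly, so a 0xFF byte in the file is indistinguishable from the -1 end-of-stream marker; a general-purpose adapter would mask it:

    // Sketch of the IndexInput -> InputStream adapter, with the 0xff mask the
    // inline version above omits (input is the IndexInput being wrapped).
    InputStream is = new InputStream() {
      @Override
      public int read() throws IOException {
        try {
          return input.readByte() & 0xff;   // keeps bytes 0x80-0xFF distinct from EOF
        } catch (EOFException e) {
          return -1;
        }
      }
    };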
@@ -820,13 +940,8 @@ public class SnapPuller {
     }
     return nameVsFile.isEmpty() ? Collections.EMPTY_LIST : nameVsFile.values();
   }
-
-  /**
-   * Delete the directory tree recursively
-   */
-
+
   static boolean delTree(File dir) {
     if (dir == null || !dir.exists())
       return false;
     boolean isSuccess = true;
     File contents[] = dir.listFiles();
     if (contents != null) {
@@ -902,9 +1017,10 @@ public class SnapPuller {
     return tmp == null ? Collections.EMPTY_LIST : new ArrayList<Map<String, Object>>(tmp);
   }
 
+  // TODO: currently does not reflect conf files
   Map<String, Object> getCurrentFile() {
     Map<String, Object> tmp = currentFile;
-    FileFetcher tmpFileFetcher = fileFetcher;
+    DirectoryFileFetcher tmpFileFetcher = dirFileFetcher;
     if (tmp == null)
       return null;
     tmp = new HashMap<String, Object>(tmp);
@@ -933,9 +1049,255 @@ public class SnapPuller {
   /**
    * The class acts as a client for ReplicationHandler.FileStream. It understands the protocol of wt=filestream
    *
-   * @see org.apache.solr.handler.ReplicationHandler.FileStream
+   * @see org.apache.solr.handler.ReplicationHandler.DirectoryFileStream
    */
-  private class FileFetcher {
+  private class DirectoryFileFetcher {
+    boolean includeChecksum = true;
+
+    Directory copy2Dir;
+
+    String fileName;
+
+    String saveAs;
+
+    long size;
+
+    long bytesDownloaded = 0;
+
+    byte[] buf = new byte[1024 * 1024];
+
+    Checksum checksum;
+
+    int errorCount = 0;
+
+    private boolean isConf;
+
+    private boolean aborted = false;
+
+    private Long indexGen;
+
+    private IndexOutput outStream;
+
+    DirectoryFileFetcher(Directory tmpIndexDir, Map<String, Object> fileDetails, String saveAs,
+        boolean isConf, long latestGen) throws IOException {
+      this.copy2Dir = tmpIndexDir;
+      this.fileName = (String) fileDetails.get(NAME);
+      this.size = (Long) fileDetails.get(SIZE);
+      this.isConf = isConf;
+      this.saveAs = saveAs;
+
+      indexGen = latestGen;
+
+      outStream = copy2Dir.createOutput(saveAs, IOContext.DEFAULT);
+
+      if (includeChecksum)
+        checksum = new Adler32();
+    }
+
+    /**
+     * The main method which downloads file
+     */
+    void fetchFile() throws Exception {
+      try {
+        while (true) {
+          final FastInputStream is = getStream();
+          int result;
+          try {
+            //fetch packets one by one in a single request
+            result = fetchPackets(is);
+            if (result == 0 || result == NO_CONTENT) {
+
+              return;
+            }
+            //if there is an error continue. But continue from the point where it got broken
+          } finally {
+            IOUtils.closeQuietly(is);
+          }
+        }
+      } finally {
+        cleanup();
+        //if cleanup suceeds . The file is downloaded fully. do an fsync
+        fsyncService.submit(new Runnable(){
+          public void run() {
+            try {
+              copy2Dir.sync(Collections.singleton(saveAs));
+            } catch (IOException e) {
+              fsyncException = e;
+            }
+          }
+        });
+      }
+    }
+
+    private int fetchPackets(FastInputStream fis) throws Exception {
+      byte[] intbytes = new byte[4];
+      byte[] longbytes = new byte[8];
+      try {
+        while (true) {
+          if (stop) {
+            stop = false;
+            aborted = true;
+            throw new ReplicationHandlerException("User aborted replication");
+          }
+          long checkSumServer = -1;
+          fis.readFully(intbytes);
+          //read the size of the packet
+          int packetSize = readInt(intbytes);
+          if (packetSize <= 0) {
+            LOG.warn("No content recieved for file: " + currentFile);
+            return NO_CONTENT;
+          }
+          if (buf.length < packetSize)
+            buf = new byte[packetSize];
+          if (checksum != null) {
+            //read the checksum
+            fis.readFully(longbytes);
+            checkSumServer = readLong(longbytes);
+          }
+          //then read the packet of bytes
+          fis.readFully(buf, 0, packetSize);
+          //compare the checksum as sent from the master
+          if (includeChecksum) {
+            checksum.reset();
+            checksum.update(buf, 0, packetSize);
+            long checkSumClient = checksum.getValue();
+            if (checkSumClient != checkSumServer) {
+              LOG.error("Checksum not matched between client and server for: " + currentFile);
+              //if checksum is wrong it is a problem return for retry
+              return 1;
+            }
+          }
+          //if everything is fine, write down the packet to the file
+          writeBytes(packetSize);
+          bytesDownloaded += packetSize;
+          if (bytesDownloaded >= size)
+            return 0;
+          //errorcount is always set to zero after a successful packet
+          errorCount = 0;
+        }
+      } catch (ReplicationHandlerException e) {
+        throw e;
+      } catch (Exception e) {
+        LOG.warn("Error in fetching packets ", e);
+        //for any failure , increment the error count
+        errorCount++;
+        //if it fails for the same pacaket for MAX_RETRIES fail and come out
+        if (errorCount > MAX_RETRIES) {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+              "Fetch failed for file:" + fileName, e);
+        }
+        return ERR;
+      }
+    }
+
+    protected void writeBytes(int packetSize) throws IOException {
+      outStream.writeBytes(buf, 0, packetSize);
+    }
+
+    /**
+     * The webcontainer flushes the data only after it fills the buffer size. So, all data has to be read as readFully()
+     * other wise it fails. So read everything as bytes and then extract an integer out of it
+     */
+    private int readInt(byte[] b) {
+      return (((b[0] & 0xff) << 24) | ((b[1] & 0xff) << 16)
+          | ((b[2] & 0xff) << 8) | (b[3] & 0xff));
+
+    }
+
+    /**
+     * Same as above but to read longs from a byte array
+     */
+    private long readLong(byte[] b) {
+      return (((long) (b[0] & 0xff)) << 56) | (((long) (b[1] & 0xff)) << 48)
+          | (((long) (b[2] & 0xff)) << 40) | (((long) (b[3] & 0xff)) << 32)
+          | (((long) (b[4] & 0xff)) << 24) | ((b[5] & 0xff) << 16)
+          | ((b[6] & 0xff) << 8) | ((b[7] & 0xff));
+
+    }
+
+    /**
+     * cleanup everything
+     */
+    private void cleanup() {
+      try {
+        outStream.close();
+      } catch (Exception e) {/* noop */
+        LOG.error("Error closing the file stream: "+ this.saveAs ,e);
+      }
+      if (bytesDownloaded != size) {
+        //if the download is not complete then
+        //delete the file being downloaded
+        try {
+          copy2Dir.deleteFile(saveAs);
+        } catch (Exception e) {
+          LOG.error("Error deleting file in cleanup" + e.getMessage());
+        }
+        //if the failure is due to a user abort it is returned nomally else an exception is thrown
+        if (!aborted)
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+              "Unable to download " + fileName + " completely. Downloaded "
+                  + bytesDownloaded + "!=" + size);
+      }
+    }
+
+    /**
+     * Open a new stream using HttpClient
+     */
+    FastInputStream getStream() throws IOException {
+      SolrServer s = new HttpSolrServer(masterUrl, myHttpClient, null);  //XXX use shardhandler
+      ModifiableSolrParams params = new ModifiableSolrParams();
+
+//    //the method is command=filecontent
+      params.set(COMMAND, CMD_GET_FILE);
+      params.set(GENERATION, Long.toString(indexGen));
+      params.set(CommonParams.QT, "/replication");
+      //add the version to download. This is used to reserve the download
+      if (isConf) {
+        //set cf instead of file for config file
+        params.set(CONF_FILE_SHORT, fileName);
+      } else {
+        params.set(FILE, fileName);
+      }
+      if (useInternal) {
+        params.set(COMPRESSION, "true");
+      }
+      //use checksum
+      if (this.includeChecksum) {
+        params.set(CHECKSUM, true);
+      }
+      //wt=filestream this is a custom protocol
+      params.set(CommonParams.WT, FILE_STREAM);
+      // This happen if there is a failure there is a retry. the offset=<sizedownloaded> ensures that
+      // the server starts from the offset
+      if (bytesDownloaded > 0) {
+        params.set(OFFSET, Long.toString(bytesDownloaded));
+      }
+
+
+      NamedList response;
+      InputStream is = null;
+      try {
+        QueryRequest req = new QueryRequest(params);
+        response = s.request(req);
+        is = (InputStream) response.get("stream");
+        if(useInternal) {
+          is = new InflaterInputStream(is);
+        }
+        return new FastInputStream(is);
+      } catch (Throwable t) {
+        //close stream on error
+        IOUtils.closeQuietly(is);
+        throw new IOException("Could not download file '" + fileName + "'", t);
+      }
+    }
+  }
+
+  /**
+   * The class acts as a client for ReplicationHandler.FileStream. It understands the protocol of wt=filestream
+   *
+   * @see org.apache.solr.handler.ReplicationHandler.LocalFsFileStream
+   */
+  private class LocalFsFileFetcher {
     boolean includeChecksum = true;
 
     private File copy2Dir;
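DirectoryFileFetcher consumes a small framed protocol: a 4-byte big-endian packet length, an optional 8-byte Adler32 checksum, then the payload, repeated until the expected file size has been written. The readInt/readLong helpers above are plain big-endian decodes; a self-contained check of the int case:

    // Standalone illustration of the big-endian decode readInt performs above.
    public class ReadIntDemo {
      static int readInt(byte[] b) {
        return ((b[0] & 0xff) << 24) | ((b[1] & 0xff) << 16)
             | ((b[2] & 0xff) << 8) | (b[3] & 0xff);
      }

      public static void main(String[] args) {
        byte[] header = {0x00, 0x10, 0x00, 0x00};   // frames a 1 MB packet
        System.out.println(readInt(header));        // prints 1048576
      }
    }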
@@ -944,7 +1306,7 @@ public class SnapPuller {
 
     String saveAs;
 
-    long size, lastmodified;
+    long size;
 
     long bytesDownloaded = 0;
 
@@ -966,16 +1328,15 @@ public class SnapPuller {
 
     private Long indexGen;
 
-    FileFetcher(File dir, Map<String, Object> fileDetails, String saveAs,
+    // TODO: could do more code sharing with DirectoryFileFetcher
+    LocalFsFileFetcher(File dir, Map<String, Object> fileDetails, String saveAs,
         boolean isConf, long latestGen) throws IOException {
       this.copy2Dir = dir;
       this.fileName = (String) fileDetails.get(NAME);
       this.size = (Long) fileDetails.get(SIZE);
       this.isConf = isConf;
       this.saveAs = saveAs;
-      if(fileDetails.get(LAST_MODIFIED) != null){
-        lastmodified = (Long)fileDetails.get(LAST_MODIFIED);
-      }
+
       indexGen = latestGen;
 
       this.file = new File(copy2Dir, saveAs);
@@ -1007,10 +1368,6 @@ public class SnapPuller {
             //fetch packets one by one in a single request
             result = fetchPackets(is);
             if (result == 0 || result == NO_CONTENT) {
-              // if the file is downloaded properly set the
-              //  timestamp same as that in the server
-              if (file.exists() && lastmodified > 0)
-                file.setLastModified(lastmodified);
               return;
             }
             //if there is an error continue. But continue from the point where it got broken
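After this change SnapPuller deliberately keeps two fetchers: DirectoryFileFetcher streams index files into the replication tmp Directory (whatever its backing store), while LocalFsFileFetcher still writes configuration files to the local conf directory, which is always plain files. The dispatch visible in the download methods earlier in this diff reduces to:

    // index data goes through the Directory abstraction ...
    dirFileFetcher = new DirectoryFileFetcher(tmpIndexDir, file,
        (String) file.get(NAME), false, latestGeneration);
    dirFileFetcher.fetchFile();

    // ... while conf files still land on the local filesystem
    localFileFetcher = new LocalFsFileFetcher(tmpconfDir, file, saveAs, true, latestGeneration);
    localFileFetcher.fetchFile();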
@@ -17,9 +17,6 @@
 package org.apache.solr.handler;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -31,12 +28,13 @@ import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.SimpleFSLockFactory;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.IndexDeletionPolicyWrapper;
 import org.apache.solr.core.SolrCore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -101,8 +99,14 @@ public class SnapShooter {
         return;
       }
       Collection<String> files = indexCommit.getFileNames();
-      FileCopier fileCopier = new FileCopier(solrCore.getDeletionPolicy(), indexCommit);
-      fileCopier.copyFiles(files, snapShotDir);
+      FileCopier fileCopier = new FileCopier();
+
+      Directory dir = solrCore.getDirectoryFactory().get(solrCore.getIndexDir(), null);
+      try {
+        fileCopier.copyFiles(dir, files, snapShotDir);
+      } finally {
+        solrCore.getDirectoryFactory().release(dir);
+      }
 
       details.add("fileCount", files.size());
       details.add("status", "success");
@@ -169,36 +173,26 @@ public class SnapShooter {
 
 
   private class FileCopier {
-    private static final int DEFAULT_BUFFER_SIZE = 32768;
-    private byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
-    private IndexCommit indexCommit;
-    private IndexDeletionPolicyWrapper delPolicy;
 
-    public FileCopier(IndexDeletionPolicyWrapper delPolicy, IndexCommit commit) {
-      this.delPolicy = delPolicy;
-      this.indexCommit = commit;
-    }
-
-    public void copyFiles(Collection<String> files, File destDir) throws IOException {
-      for (String indexFile : files) {
-        File source = new File(solrCore.getIndexDir(), indexFile);
-        copyFile(source, new File(destDir, source.getName()), true);
-      }
-    }
-
-    public void copyFile(File source, File destination, boolean preserveFileDate)
-      throws IOException {
-      // check source exists
-      if (!source.exists()) {
-        String message = "File " + source + " does not exist";
-        throw new FileNotFoundException(message);
-      }
-
+    public void copyFiles(Directory sourceDir, Collection<String> files,
+        File destDir) throws IOException {
       // does destinations directory exist ?
-      if (destination.getParentFile() != null
-          && !destination.getParentFile().exists()) {
-        destination.getParentFile().mkdirs();
+      if (destDir != null && !destDir.exists()) {
+        destDir.mkdirs();
       }
 
+      FSDirectory dir = FSDirectory.open(destDir);
+      try {
+        for (String indexFile : files) {
+          copyFile(sourceDir, indexFile, new File(destDir, indexFile), dir);
+        }
+      } finally {
+        dir.close();
+      }
+    }
+
+    public void copyFile(Directory sourceDir, String indexFile, File destination, Directory destDir)
+      throws IOException {
+
       // make sure we can write to destination
       if (destination.exists() && !destination.canWrite()) {
@@ -206,45 +200,7 @@ public class SnapShooter {
         throw new IOException(message);
       }
 
-      FileInputStream input = null;
-      FileOutputStream output = null;
-      try {
-        input = new FileInputStream(source);
-        output = new FileOutputStream(destination);
-
-        int count = 0;
-        int n = 0;
-        int rcnt = 0;
-        while (-1 != (n = input.read(buffer))) {
-          output.write(buffer, 0, n);
-          count += n;
-          rcnt++;
-          /***
-          // reserve every 4.6875 MB
-          if (rcnt == 150) {
-            rcnt = 0;
-            delPolicy.setReserveDuration(indexCommit.getVersion(), reserveTime);
-          }
-           ***/
-        }
-      } finally {
-        try {
-          IOUtils.closeQuietly(input);
-        } finally {
-          IOUtils.closeQuietly(output);
-        }
-      }
-
-      if (source.length() != destination.length()) {
-        String message = "Failed to copy full contents from " + source + " to "
-            + destination;
-        throw new IOException(message);
-      }
-
-      if (preserveFileDate) {
-        // file copy should preserve file date
-        destination.setLastModified(source.lastModified());
-      }
+      sourceDir.copy(destDir, indexFile, indexFile, IOContext.DEFAULT);
     }
   }
 
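The SnapShooter rewrite above replaces a hand-rolled buffered stream copy (with its length check and mtime preservation) by a single Directory.copy per file into an FSDirectory opened over the snapshot target. The whole snapshot path, condensed into a sketch that assumes only the Lucene 4.x signatures already used in this diff:

    // Sketch: copy a commit's files from the live index Directory into snapShotDir.
    Directory source = solrCore.getDirectoryFactory().get(solrCore.getIndexDir(), null);
    try {
      FSDirectory dest = FSDirectory.open(snapShotDir);
      try {
        for (String file : indexCommit.getFileNames()) {
          source.copy(dest, file, file, IOContext.DEFAULT);
        }
      } finally {
        dest.close();
      }
    } finally {
      solrCore.getDirectoryFactory().release(source);
    }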
@@ -610,20 +610,26 @@ public class CoreAdminHandler extends RequestHandlerBase {
 
             @Override
             public void postClose(SolrCore core) {
-              File dataDir = new File(core.getIndexDir());
-              File[] files = dataDir.listFiles();
-              if (files != null) {
-                for (File file : files) {
-                  if (!file.delete()) {
-                    log.error(file.getAbsolutePath()
-                        + " could not be deleted on core unload");
-                  }
-                }
-                if (!dataDir.delete()) log.error(dataDir.getAbsolutePath()
-                    + " could not be deleted on core unload");
-              } else {
-                log.error(dataDir.getAbsolutePath()
-                    + " could not be deleted on core unload");
-              }
+              Directory dir = null;
+              try {
+                dir = core.getDirectoryFactory().get(core.getIndexDir(), null);
+                core.getDirectoryFactory().remove(dir);
+                core.getDirectoryFactory().doneWithDirectory(dir);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              } finally {
+                if (dir != null) {
+                  try {
+                    core.getDirectoryFactory().release(dir);
+                  } catch (IOException e) {
+                    log.error("IOException trying to release directory", e);
+                  }
+                }
+              }
+              try {
+                core.getDirectoryFactory().remove(dir);
+              } catch (IOException e) {
+                log.error("IOException trying to remove directory", e);
+              }
             }
           });
@@ -668,6 +674,10 @@ public class CoreAdminHandler extends RequestHandlerBase {
         });
       }
     } finally {
+      // it's important that we try and cancel recovery
+      // before we close here - else we might close the
+      // core *in* recovery and end up locked in recovery
+      // waiting to for recovery to be cancelled
       if (core != null) {
         if (coreContainer.getZkController() != null) {
           core.getSolrCoreState().cancelRecovery();
@@ -1001,7 +1011,19 @@ public class CoreAdminHandler extends RequestHandlerBase {
   }
 
   private long getIndexSize(SolrCore core) {
-    return FileUtils.sizeOfDirectory(new File(core.getIndexDir()));
+    Directory dir;
+    long size = 0;
+    try {
+      dir = core.getDirectoryFactory().get(core.getIndexDir(), null);
+      try {
+        size = DirectoryFactory.sizeOfDirectory(dir);
+      } finally {
+        core.getDirectoryFactory().release(dir);
+      }
+    } catch (IOException e) {
+      SolrException.log(log, "IO error while trying to get the size of the Directory", e);
+    }
+    return size;
   }
 
   protected static String normalizePath(String path) {
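getIndexSize can no longer sum a java.io.File tree, so it defers to DirectoryFactory.sizeOfDirectory, whose body is not shown in this hunk. A plausible implementation, offered as an assumption, just sums fileLength over listAll:

    // Hypothetical body for DirectoryFactory.sizeOfDirectory (an assumption):
    public static long sizeOfDirectory(Directory directory) throws IOException {
      long size = 0;
      for (String file : directory.listAll()) {
        size += directory.fileLength(file);
      }
      return size;
    }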
@@ -19,6 +19,7 @@ package org.apache.solr.handler.admin;
 
 import java.io.DataInputStream;
 import java.io.File;
+import java.io.IOException;
 import java.io.InputStreamReader;
 import java.lang.management.ManagementFactory;
 import java.lang.management.OperatingSystemMXBean;
@@ -104,7 +105,13 @@ public class SystemInfoHandler extends RequestHandlerBase
     dirs.add( "cwd" , new File( System.getProperty("user.dir")).getAbsolutePath() );
     dirs.add( "instance", new File( core.getResourceLoader().getInstanceDir() ).getAbsolutePath() );
     dirs.add( "data", new File( core.getDataDir() ).getAbsolutePath() );
-    dirs.add( "index", new File( core.getIndexDir() ).getAbsolutePath() );
+    dirs.add( "dirimpl", core.getDirectoryFactory().getClass().getName());
+    try {
+      dirs.add( "index", core.getDirectoryFactory().normalize(core.getIndexDir()) );
+    } catch (IOException e) {
+      log.warn("Problem getting the normalized index directory path", e);
+      dirs.add( "index", "N/A" );
+    }
     info.add( "directory", dirs );
     return info;
   }
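normalize() gives each DirectoryFactory a chance to report a stable, comparable form of an index path, and the new try/catch keeps the system info page usable when that lookup fails. For a filesystem-backed factory the natural choice, stated here as an assumption, is the canonical path:

    // Hypothetical normalize() for an FS-based factory (an assumption):
    public String normalize(String path) throws IOException {
      return new File(path).getCanonicalPath();
    }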
@@ -41,6 +41,7 @@ import org.apache.lucene.store.NRTCachingDirectory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.OpenBitSet;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -77,7 +78,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
   private static Logger log = LoggerFactory.getLogger(SolrIndexSearcher.class);
   private final SolrCore core;
   private final IndexSchema schema;
-  private String indexDir;
 
   private boolean debug = log.isDebugEnabled();
 
   private final String name;
@@ -148,8 +149,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
       directoryFactory.incRef(dir);
     }
 
-    this.indexDir = getIndexDir(dir);
-
     this.closeReader = closeReader;
     setSimilarity(schema.getSimilarity());
 
@@ -273,7 +272,11 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
     // super.close();
     // can't use super.close() since it just calls reader.close() and that may only be called once
     // per reader (even if incRef() was previously called).
-    if (closeReader) reader.decRef();
+    try {
+      if (closeReader) reader.decRef();
+    } catch (Throwable t) {
+      SolrException.log(log, "Problem dec ref'ing reader", t);
+    }
 
     for (SolrCache cache : cacheList) {
       cache.close();
@@ -409,12 +412,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
   //  }
   // }
 
-  /**
-   * @return the indexDir on which this searcher is opened
-   */
-  public String getIndexDir() {
-    return indexDir;
-  }
-
   /* ********************** Document retrieval *************************/
 
@@ -113,7 +113,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
   }
 
   @Override
-  public synchronized void newIndexWriter(SolrCore core, boolean rollback) throws IOException {
+  public synchronized void newIndexWriter(SolrCore core, boolean rollback, boolean forceNewDir) throws IOException {
     log.info("Creating new IndexWriter...");
     String coreName = core.getName();
     synchronized (writerPauseLock) {
@@ -148,7 +148,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
         }
       }
-      indexWriter = createMainIndexWriter(core, "DirectUpdateHandler2", true);
+      indexWriter = createMainIndexWriter(core, "DirectUpdateHandler2", forceNewDir);
       log.info("New IndexWriter is ready to be used.");
       // we need to null this so it picks up the new writer next get call
       refCntWriter = null;
@@ -162,7 +162,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
 
   @Override
   public synchronized void rollbackIndexWriter(SolrCore core) throws IOException {
-    newIndexWriter(core, true);
+    newIndexWriter(core, true, true);
   }
 
   protected SolrIndexWriter createMainIndexWriter(SolrCore core, String name, boolean forceNewDirectory) throws IOException {
@@ -45,7 +45,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -604,8 +603,8 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   }
 
   @Override
-  public void newIndexWriter(boolean rollback) throws IOException {
-    solrCoreState.newIndexWriter(core, rollback);
+  public void newIndexWriter(boolean rollback, boolean forceNewDir) throws IOException {
+    solrCoreState.newIndexWriter(core, rollback, forceNewDir);
   }
 
   /**
@@ -44,7 +44,7 @@ public abstract class SolrCoreState {
    * @param rollback close IndexWriter if false, else rollback
    * @throws IOException If there is a low-level I/O error.
    */
-  public abstract void newIndexWriter(SolrCore core, boolean rollback) throws IOException;
+  public abstract void newIndexWriter(SolrCore core, boolean rollback, boolean forceNewDir) throws IOException;
 
   /**
    * Get the current IndexWriter. If a new IndexWriter must be created, use the
@@ -116,10 +116,11 @@ public abstract class UpdateHandler implements SolrInfoMBean {
    * all of the index files.
    *
    * @param rollback IndexWriter if true else close
+   * @param forceNewDir Force a new Directory instance
    *
    * @throws IOException If there is a low-level I/O error.
    */
-  public abstract void newIndexWriter(boolean rollback) throws IOException;
+  public abstract void newIndexWriter(boolean rollback, boolean forceNewDir) throws IOException;
 
   public abstract SolrCoreState getSolrCoreState();
 
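The new boolean threads one decision from UpdateHandler down through SolrCoreState to DefaultSolrCoreState: whether the replacement IndexWriter must also be handed a brand-new Directory instance rather than the cached one. The two call sites changed in this commit show the intended split:

    // replication swapped files under the existing Directory, so reuse it:
    solrCore.getUpdateHandler().newIndexWriter(isFullCopyNeeded, false);

    // rollback discards writer state entirely, so force a fresh Directory too:
    newIndexWriter(core, true, true);   // from rollbackIndexWriter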
@@ -23,49 +23,26 @@ import java.io.IOException;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServer;
-import org.apache.solr.client.solrj.embedded.JettySolrRunner;
-import org.apache.solr.client.solrj.impl.HttpSolrServer;
-import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
+import org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
-import org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.util.AbstractSolrTestCase;
-import org.junit.After;
+import org.junit.BeforeClass;
 
-public class AnalysisAfterCoreReloadTest extends AbstractSolrTestCase {
-  private File homeDir;
+public class AnalysisAfterCoreReloadTest extends SolrTestCaseJ4 {
 
-  int port = 0;
-  static final String context = "/solr";
-  JettySolrRunner jetty;
-
   static final String collection = "collection1";
 
-  @After
-  public void cleanUp() throws Exception {
-    jetty.stop();
-    if (homeDir != null && homeDir.isDirectory() && homeDir.exists())
-      recurseDelete(homeDir);
-  }
-
-  @Override
-  public String getSolrHome() {
-    return homeDir.getAbsolutePath();
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig.xml", "schema.xml");
   }
 
-  @Override
-  public void setUp() throws Exception {
-    homeDir = new File(TEMP_DIR + File.separator + "solr-test-home-" + System.nanoTime());
-    homeDir.mkdirs();
-    FileUtils.copyDirectory(new File(getFile("solr/" + collection).getParent()), homeDir, false);
-
-    super.setUp();
-
-    jetty = new JettySolrRunner(getSolrHome(), context, 0 );
-    jetty.start(false);
-    port = jetty.getLocalPort();
-  }
-
   public void testStopwordsAfterCoreReload() throws Exception {
     SolrInputDocument doc = new SolrInputDocument();
     doc.setField( "id", "42" );
@@ -100,8 +77,7 @@ public class AnalysisAfterCoreReloadTest extends AbstractSolrTestCase {
 
     // overwrite stopwords file with stopword list ["stopwordc"] and reload the core
     overwriteStopwords("stopwordc\n");
-    SolrServer coreadmin = getSolrAdmin();
-    CoreAdminRequest.reloadCore(collection, coreadmin);
+    h.getCoreContainer().reload(collection);
 
     up.process( getSolrCore() );
 
@@ -133,42 +109,33 @@ public class AnalysisAfterCoreReloadTest extends AbstractSolrTestCase {
     SolrCore core = h.getCoreContainer().getCore(collection);
     try {
       String configDir = core.getResourceLoader().getConfigDir();
+      FileUtils.moveFile(new File(configDir, "stopwords.txt"), new File(configDir, "stopwords.txt.bak"));
       File file = new File(configDir, "stopwords.txt");
       FileUtils.writeStringToFile(file, stopwords);
 
     } finally {
       core.close();
     }
   }
 
-  protected SolrServer getSolrAdmin() {
-    return createServer("");
-  }
-  protected SolrServer getSolrCore() {
-    return createServer(collection);
-  }
-  private SolrServer createServer( String name ) {
-    try {
-      // setup the server...
-      String url = "http://127.0.0.1:"+port+context+"/"+name;
-      HttpSolrServer s = new HttpSolrServer( url );
-      s.setConnectionTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
-      s.setDefaultMaxConnectionsPerHost(100);
-      s.setMaxTotalConnections(100);
-      return s;
-    }
-    catch( Exception ex ) {
-      throw new RuntimeException( ex );
-    }
-  }
-
-  @Override
-  public String getSchemaFile() {
-    return "schema.xml";
-  }
-
-  @Override
-  public String getSolrConfigFile() {
-    return "solrconfig.xml";
+  @Override
+  public void tearDown() throws Exception {
+    SolrCore core = h.getCoreContainer().getCore(collection);
+    String configDir;
+    try {
+      configDir = core.getResourceLoader().getConfigDir();
+    } finally {
+      core.close();
+    }
+    super.tearDown();
+    if (new File(configDir, "stopwords.txt.bak").exists()) {
+      FileUtils.deleteQuietly(new File(configDir, "stopwords.txt"));
+      FileUtils.moveFile(new File(configDir, "stopwords.txt.bak"), new File(configDir, "stopwords.txt"));
+    }
+  }
+
+  protected SolrServer getSolrCore() {
+    return new EmbeddedSolrServer(h.getCore());
   }
 
 }
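AnalysisAfterCoreReloadTest is the template for the test conversions in the rest of this commit: the per-test Jetty instance and the getSchemaFile/getSolrConfigFile overrides give way to a one-time initCore plus an in-process EmbeddedSolrServer. Reduced to its moving parts (the class name is hypothetical):

    public class SomeSolrTest extends SolrTestCaseJ4 {
      @BeforeClass
      public static void beforeClass() throws Exception {
        initCore("solrconfig.xml", "schema.xml");   // one harness for the whole class
      }

      protected SolrServer getSolrCore() {
        return new EmbeddedSolrServer(h.getCore()); // no HTTP round trip needed
      }
    }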
@@ -33,13 +33,22 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.servlet.SolrDispatchFilter;
+import org.junit.BeforeClass;
 
 /**
  * This test simply does a bunch of basic things in solrcloud mode and asserts things
  * work as expected.
  */
 public class BasicDistributedZk2Test extends AbstractFullDistribZkTestBase {
 
+  @BeforeClass
+  public static void beforeThisClass2() throws Exception {
+    // TODO: we use an fs based dir because something
+    // like a ram dir will not recover correctly right now
+    // because tran log will still exist on restart and ram
+    // dir will not persist - perhaps translog can empty on
+    // start if using an EphemeralDirectoryFactory
+    useFactory(null);
+  }
   /*
    * (non-Javadoc)
    *
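useFactory(null) in the new @BeforeClass, repeated in BasicDistributedZkTest and SyncSliceTest below, pins the otherwise randomized test DirectoryFactory to the default filesystem-backed one: a RAM directory loses the index across a restart while the transaction log survives, so recovery in these restart-heavy tests would diverge. A typical pairing, where the restore call is an assumption about the test framework rather than something shown in this diff:

    @BeforeClass
    public static void beforeThisClass() throws Exception {
      useFactory(null);   // null selects the default FS-based DirectoryFactory
    }

    @AfterClass
    public static void afterThisClass() throws Exception {
      resetFactory();     // assumption: hand the randomized factory back afterwards
    }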
@@ -100,6 +109,7 @@ public class BasicDistributedZk2Test extends AbstractFullDistribZkTestBase {
 
     // TODO: bring this to it's own method?
     // try indexing to a leader that has no replicas up
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     ZkNodeProps leaderProps = zkStateReader.getLeaderProps(
         DEFAULT_COLLECTION, SHARD2);
 
@@ -175,9 +185,13 @@ public class BasicDistributedZk2Test extends AbstractFullDistribZkTestBase {
 
     query("q", "*:*", "sort", "n_tl1 desc");
 
+    int oldLiveNodes = cloudClient.getZkStateReader().getZkClient().getChildren(ZkStateReader.LIVE_NODES_ZKNODE, null, true).size();
+
+    assertEquals(5, oldLiveNodes);
+
     // kill a shard
     CloudJettyRunner deadShard = chaosMonkey.stopShard(SHARD2, 0);
     cloudClient.connect();
 
     // we are careful to make sure the downed node is no longer in the state,
     // because on some systems (especially freebsd w/ blackhole enabled), trying
@@ -186,10 +200,23 @@ public class BasicDistributedZk2Test extends AbstractFullDistribZkTestBase {
     jetties.addAll(shardToJetty.get(SHARD2));
     jetties.remove(deadShard);
 
+    // wait till live nodes drops by 1
+    int liveNodes = cloudClient.getZkStateReader().getZkClient().getChildren(ZkStateReader.LIVE_NODES_ZKNODE, null, true).size();
+    int tries = 50;
+    while(oldLiveNodes == liveNodes) {
+      Thread.sleep(100);
+      if (tries-- == 0) {
+        fail("We expected a node to drop...");
+      }
+      liveNodes = cloudClient.getZkStateReader().getZkClient().getChildren(ZkStateReader.LIVE_NODES_ZKNODE, null, true).size();
+    }
+    assertEquals(4, liveNodes);
+
+    int cnt = 0;
     for (CloudJettyRunner cjetty : jetties) {
       waitToSeeNotLive(((SolrDispatchFilter) cjetty.jetty.getDispatchFilter()
           .getFilter()).getCores().getZkController().getZkStateReader(),
-          deadShard);
+          deadShard, cnt++);
     }
     waitToSeeNotLive(cloudClient.getZkStateReader(), deadShard);
 
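The live-nodes wait added above is the poll-with-deadline idiom these cloud tests rely on: re-check a condition every 100 ms and fail after a bounded number of tries. Factored out as a self-contained helper (names are illustrative):

    public class WaitFor {
      interface Condition {
        boolean met() throws Exception;
      }

      static void waitFor(Condition c, int tries) throws Exception {
        while (!c.met()) {
          if (tries-- == 0) {
            throw new AssertionError("gave up waiting for condition");
          }
          Thread.sleep(100);
        }
      }
    }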
@@ -74,6 +74,7 @@ import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.update.SolrCmdDistributor.Request;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.junit.Before;
+import org.junit.BeforeClass;
 
 /**
  * This test simply does a bunch of basic things in solrcloud mode and asserts things
@@ -113,6 +114,12 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
   CompletionService<Request> completionService;
   Set<Future<Request>> pending;
 
+  @BeforeClass
+  public static void beforeThisClass2() throws Exception {
+    // TODO: we use an fs based dir because something
+    // like a ram dir will not recover correctly right now
+    useFactory(null);
+  }
+
   @Before
   @Override
@@ -153,7 +160,7 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
   public void doTest() throws Exception {
     // setLoggingLevel(null);
 
-
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     // make sure we have leaders for each shard
     for (int j = 1; j < sliceCount; j++) {
       zkStateReader.getLeaderProps(DEFAULT_COLLECTION, "shard" + j, 10000);
@@ -357,6 +364,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
     createCmd.setDataDir(core1DataDir);
     server.request(createCmd);
 
+    ZkStateReader zkStateReader = solrj.getZkStateReader();
+
     zkStateReader.updateClusterState(true);
 
     int slices = zkStateReader.getClusterState().getCollectionStates().get("unloadcollection").size();
@@ -591,6 +600,7 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
       executor.awaitTermination(120, TimeUnit.SECONDS);
   }
 
+
   private String getBaseUrl(SolrServer client) {
     String url2 = ((HttpSolrServer) client).getBaseURL()
         .substring(
@@ -631,7 +641,7 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
       // poll for a second - it can take a moment before we are ready to serve
       waitForNon403or404or503(collectionClient);
     }
-
+    ZkStateReader zkStateReader = solrj.getZkStateReader();
     for (int j = 0; j < cnt; j++) {
       waitForRecoveriesToFinish("awholynewcollection_" + j, zkStateReader, false);
     }
@@ -1235,21 +1245,30 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
 
     indexDoc("collection2", getDoc(id, "10000000"));
     indexDoc("collection2", getDoc(id, "10000001"));
     indexDoc("collection2", getDoc(id, "10000003"));
+
+    solrj.setDefaultCollection("collection2");
+    solrj.add(getDoc(id, "10000004"));
+    solrj.setDefaultCollection(null);
 
     indexDoc("collection3", getDoc(id, "20000000"));
     indexDoc("collection3", getDoc(id, "20000001"));
+    solrj.setDefaultCollection("collection3");
+    solrj.add(getDoc(id, "10000005"));
+    solrj.setDefaultCollection(null);
 
     otherCollectionClients.get("collection2").get(0).commit();
     otherCollectionClients.get("collection3").get(0).commit();
 
     solrj.setDefaultCollection("collection1");
     long collection1Docs = solrj.query(new SolrQuery("*:*")).getResults()
         .getNumFound();
+
     long collection2Docs = otherCollectionClients.get("collection2").get(0)
         .query(new SolrQuery("*:*")).getResults().getNumFound();
+    System.out.println("found2: "+ collection2Docs);
     long collection3Docs = otherCollectionClients.get("collection3").get(0)
         .query(new SolrQuery("*:*")).getResults().getNumFound();
+    System.out.println("found3: "+ collection3Docs);
 
     SolrQuery query = new SolrQuery("*:*");
     query.set("collection", "collection2,collection3");
@@ -1276,6 +1295,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
     query.remove("collection");
     found = solrj.query(query).getResults().getNumFound();
     assertEquals(collection1Docs, found);
+
+    assertEquals(collection3Docs, collection2Docs - 1);
   }
 
   protected SolrInputDocument getDoc(Object... fields) throws Exception {
@@ -29,6 +29,7 @@ import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrServer;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.ZkStateReader;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -82,7 +83,7 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase
     handle.clear();
     handle.put("QTime", SKIPVAL);
     handle.put("timestamp", SKIPVAL);
-
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     // make sure we have leaders for each shard
     for (int j = 1; j < sliceCount; j++) {
       zkStateReader.getLeaderProps(DEFAULT_COLLECTION, "shard" + j, 10000);
@@ -189,19 +190,6 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase
         }
       }
     }
-
-  // skip the randoms - they can deadlock...
-  protected void indexr(Object... fields) throws Exception {
-    SolrInputDocument doc = getDoc(fields);
-    indexDoc(doc);
-  }
-
-  private SolrInputDocument getDoc(Object... fields) {
-    SolrInputDocument doc = new SolrInputDocument();
-    addFields(doc, fields);
-    addFields(doc, "rnd_b", true);
-    return doc;
-  }
 
   class FullThrottleStopableIndexingThread extends StopableIndexingThread {
     private HttpClient httpClient = HttpClientUtil.createClient(null);
@@ -306,4 +294,18 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase
 
   };
 
+
+  // skip the randoms - they can deadlock...
+  protected void indexr(Object... fields) throws Exception {
+    SolrInputDocument doc = getDoc(fields);
+    indexDoc(doc);
+  }
+
+  SolrInputDocument getDoc(Object... fields) throws Exception {
+    SolrInputDocument doc = new SolrInputDocument();
+    addFields(doc, fields);
+    addFields(doc, "rnd_b", true);
+    return doc;
+  }
+
 }
@@ -131,6 +131,7 @@ public class FullSolrCloudDistribCmdsTest extends AbstractFullDistribZkTestBase
   }
 
   private void testThatCantForwardToLeaderFails() throws Exception {
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     ZkNodeProps props = zkStateReader.getLeaderProps(DEFAULT_COLLECTION, "shard1");
 
     chaosMonkey.stopShard("shard1");
@@ -250,7 +251,6 @@ public class FullSolrCloudDistribCmdsTest extends AbstractFullDistribZkTestBase
 
   private void testOptimisticUpdate(QueryResponse results) throws Exception {
     SolrDocument doc = results.getResults().get(0);
-    System.out.println("version:" + doc.getFieldValue(VersionInfo.VERSION_FIELD));
     Long version = (Long) doc.getFieldValue(VersionInfo.VERSION_FIELD);
     Integer theDoc = (Integer) doc.getFieldValue("id");
     UpdateRequest uReq = new UpdateRequest();
@@ -478,7 +478,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
   }
 
   private void verifyShardLeader(ZkStateReader reader, String collection, String shard, String expectedCore) throws InterruptedException, KeeperException {
-    int maxIterations = 100;
+    int maxIterations = 200;
     while(maxIterations-->0) {
       reader.updateClusterState(true); // poll state
       ZkNodeProps props = reader.getClusterState().getLeader(collection, shard);
@@ -24,6 +24,7 @@ import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.ZkStateReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -71,7 +72,7 @@ public class RecoveryZkTest extends AbstractFullDistribZkTestBase {
 
     // make sure replication can start
     Thread.sleep(1500);
-
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, false, true);
 
     // stop indexing threads
@@ -47,8 +47,11 @@ import org.junit.BeforeClass;
 public class SyncSliceTest extends AbstractFullDistribZkTestBase {
 
   @BeforeClass
-  public static void beforeSuperClass() {
-
+  public static void beforeSuperClass() throws Exception {
+    // TODO: we use an fs based dir because something
+    // like a ram dir will not recovery correctly right now
+    // due to tran log persisting across restarts
+    useFactory(null);
   }
 
   @AfterClass
@@ -19,23 +19,26 @@ package org.apache.solr.cloud;
 
 import java.io.File;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import junit.framework.Assert;
 import junit.framework.TestCase;
 
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.util.AbstractSolrTestCase;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
 import org.junit.AfterClass;
+import org.junit.BeforeClass;
 
 public class ZkSolrClientTest extends AbstractSolrTestCase {
   private static final boolean DEBUG = false;
 
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig.xml", "schema.xml");
+  }
+
   public void testConnect() throws Exception {
     String zkDir = dataDir.getAbsolutePath() + File.separator
         + "zookeeper/server1/data";
@@ -43,7 +46,7 @@ public class ZkSolrClientTest extends AbstractSolrTestCase {
 
       server = new ZkTestServer(zkDir);
       server.run();
-
+      AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
       SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT);
 
       zkClient.close();
@@ -57,7 +60,7 @@ public class ZkSolrClientTest extends AbstractSolrTestCase {
 
       server = new ZkTestServer(zkDir);
      server.run();
-
+      AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
       AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
 
       SolrZkClient zkClient = new SolrZkClient(server.getZkHost(),
@@ -77,7 +80,7 @@ public class ZkSolrClientTest extends AbstractSolrTestCase {
     try {
       server = new ZkTestServer(zkDir);
       server.run();
-
+      AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
       AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
 
       zkClient = new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT);
@@ -170,6 +173,7 @@ public class ZkSolrClientTest extends AbstractSolrTestCase {
     final AtomicInteger cnt = new AtomicInteger();
     ZkTestServer server = new ZkTestServer(zkDir);
     server.run();
+    AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
     Thread.sleep(400);
     AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
     final SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT);
@@ -233,16 +237,6 @@ public class ZkSolrClientTest extends AbstractSolrTestCase {
     }
   }
 
-  @Override
-  public String getSchemaFile() {
-    return null;
-  }
-
-  @Override
-  public String getSolrConfigFile() {
-    return null;
-  }
-
   @Override
   public void tearDown() throws Exception {
     super.tearDown();
@ -41,7 +41,7 @@ public class AlternateDirectoryTest extends SolrTestCaseJ4 {
|
|||
assertTrue(TestIndexReaderFactory.newReaderCalled);
|
||||
}
|
||||
|
||||
static public class TestFSDirectoryFactory extends CachingDirectoryFactory {
|
||||
static public class TestFSDirectoryFactory extends StandardDirectoryFactory {
|
||||
public static volatile boolean openCalled = false;
|
||||
public static volatile Directory dir;
|
||||
|
||||
|
|
|
@ -17,17 +17,13 @@ package org.apache.solr.core;
|
|||
*/
|
||||
|
||||
import org.apache.solr.util.AbstractSolrTestCase;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
public class IndexReaderFactoryTest extends AbstractSolrTestCase {
|
||||
|
||||
@Override
|
||||
public String getSchemaFile() {
|
||||
return "schema.xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSolrConfigFile() {
|
||||
return "solrconfig-termindex.xml";
|
||||
@BeforeClass
|
||||
public static void beforeClass() throws Exception {
|
||||
initCore("solrconfig-termindex.xml", "schema.xml");
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
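Most of the test files touched by this commit follow the migration visible in the IndexReaderFactoryTest hunk above: the instance-level getSchemaFile()/getSolrConfigFile() overrides are replaced by one static @BeforeClass initializer. A condensed before/after sketch (the class name is illustrative; the API calls are the ones the diff itself uses):

import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

public class ExampleMigratedTest extends AbstractSolrTestCase {

  // Old style, removed throughout this commit: the harness read the
  // config and schema names from per-instance overrides.
  //
  //   @Override public String getSolrConfigFile() { return "solrconfig.xml"; }
  //   @Override public String getSchemaFile() { return "schema.xml"; }

  // New style: the core is created once per class, up front; the base
  // class tears it down from its own @AfterClass hook.
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }
}
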
@@ -67,7 +67,7 @@ public class SolrCoreCheckLockOnStartupTest extends SolrTestCaseJ4 {
      assertNotNull(t.getCause());
      assertTrue(t.getCause() instanceof RuntimeException);
      assertNotNull(t.getCause().getCause());
      assertTrue(t.getCause().getCause() instanceof LockObtainFailedException);
      assertTrue(t.getCause().getCause().toString(), t.getCause().getCause() instanceof LockObtainFailedException);
    } finally {
      indexWriter.close();
      directory.close();

@@ -85,16 +85,6 @@ public class TestArbitraryIndexDir extends AbstractSolrTestCase{

  }

  @Override
  public String getSchemaFile() {
    return null;
  }

  @Override
  public String getSolrConfigFile() {
    return null; // prevent superclass from creating it's own TestHarness
  }

  @Test
  public void testLoadNewIndexDir() throws IOException, ParserConfigurationException, SAXException, ParseException {
    //add a doc in original index dir

@@ -20,6 +20,7 @@ import org.apache.solr.core.JmxMonitoredMap.SolrDynamicMBean;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

@@ -35,14 +36,10 @@ import java.util.*;
 */
public class TestJmxIntegration extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() {
    return "schema.xml";
  }

  @Override
  public String getSolrConfigFile() {
    return "solrconfig.xml";

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }

  @Override

@@ -17,24 +17,19 @@ package org.apache.solr.core;
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.solr.update.DirectUpdateHandler2;
import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.util.RefCounted;
import org.junit.BeforeClass;

public class TestPropInject extends AbstractSolrTestCase {
  @Override
  public String getSchemaFile() {
    return "schema.xml";
  }

  @Override
  public String getSolrConfigFile() {
    return "solrconfig-propinject.xml";
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-propinject.xml", "schema.xml");
  }

  public void testMergePolicy() throws Exception {

@@ -17,26 +17,19 @@ package org.apache.solr.core;
 * limitations under the License.
 */

import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.update.processor.RegexReplaceProcessorFactory;

import org.apache.solr.util.AbstractSolrTestCase;

import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.solr.update.processor.RegexReplaceProcessorFactory;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.Assume;
import org.junit.BeforeClass;

public class TestXIncludeConfig extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() {
    return "schema-xinclude.xml";
  }

  //public String getSolrConfigFile() { return "solrconfig.xml"; }
  @Override
  public String getSolrConfigFile() {
    return "solrconfig-xinclude.xml";
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-xinclude.xml", "schema-xinclude.xml");
  }

  @Override

@@ -27,15 +27,19 @@ import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

/**
 * Most of the tests for StandardRequestHandler are in ConvertedLegacyTest
 *
 */
public class StandardRequestHandlerTest extends AbstractSolrTestCase {

  @Override public String getSchemaFile() { return "schema.xml"; }
  @Override public String getSolrConfigFile() { return "solrconfig.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }

  @Override public void setUp() throws Exception {
    super.setUp();
    lrf = h.getRequestFactory("standard", 0, 20 );

@@ -54,8 +54,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.After;
import org.junit.Before;

/**
 * Test for ReplicationHandler

@@ -72,9 +72,9 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
      + File.separator + "collection1" + File.separator + "conf"
      + File.separator;

  static JettySolrRunner masterJetty, slaveJetty;
  static SolrServer masterClient, slaveClient;
  static SolrInstance master = null, slave = null;
  JettySolrRunner masterJetty, slaveJetty;
  SolrServer masterClient, slaveClient;
  SolrInstance master = null, slave = null;

  static String context = "/solr";

@@ -83,9 +83,11 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
  static int nDocs = 500;


  @BeforeClass
  public static void beforeClass() throws Exception {
    useFactory(null); // need an FS factory
  @Before
  public void setup() throws Exception {
    super.setUp();
    // For manual testing only
    // useFactory(null); // force an FS factory
    master = new SolrInstance("master", null);
    master.setUp();
    masterJetty = createJetty(master);

@@ -109,8 +111,9 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
    }
  }

  @AfterClass
  public static void afterClass() throws Exception {
  @After
  public void tearDown() throws Exception {
    super.tearDown();
    masterJetty.stop();
    slaveJetty.stop();
    master.tearDown();

@@ -415,7 +418,7 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
    // setup an xslt dir to force subdir file replication
    File masterXsltDir = new File(master.getConfDir() + File.separator + "xslt");
    File masterXsl = new File(masterXsltDir, "dummy.xsl");
    assertTrue(masterXsltDir.mkdir());
    assertTrue("could not make dir " + masterXsltDir, masterXsltDir.mkdirs());
    assertTrue(masterXsl.createNewFile());

    File slaveXsltDir = new File(slave.getConfDir() + File.separator + "xslt");

@@ -596,14 +599,10 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {

    nDocs--;
    masterClient.deleteByQuery("*:*");
    for (int i = 0; i < nDocs; i++)
      index(masterClient, "id", i, "name", "name = " + i);

    masterClient.commit();

    NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient);
    SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response");
    assertEquals(nDocs, masterQueryResult.getNumFound());


    //change solrconfig having 'replicateAfter startup' option on master
    master.copyConfigFile(CONF_DIR + "solrconfig-master2.xml",

@@ -613,6 +612,16 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {

    masterJetty = createJetty(master);
    masterClient = createNewSolrServer(masterJetty.getLocalPort());

    for (int i = 0; i < nDocs; i++)
      index(masterClient, "id", i, "name", "name = " + i);

    masterClient.commit();

    NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient);
    SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response");
    assertEquals(nDocs, masterQueryResult.getNumFound());


    slave.setTestPort(masterJetty.getLocalPort());
    slave.copyConfigFile(slave.getSolrConfigFile(), "solrconfig.xml");

@@ -650,15 +659,6 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
    //stop slave
    slaveJetty.stop();

    masterClient.deleteByQuery("*:*");
    for (int i = 0; i < 10; i++)
      index(masterClient, "id", i, "name", "name = " + i);

    masterClient.commit();

    NamedList masterQueryRsp = rQuery(10, "*:*", masterClient);
    SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response");
    assertEquals(10, masterQueryResult.getNumFound());

    //change solrconfig having 'replicateAfter startup' option on master
    master.copyConfigFile(CONF_DIR + "solrconfig-master3.xml",

@@ -669,6 +669,16 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
    masterJetty = createJetty(master);
    masterClient = createNewSolrServer(masterJetty.getLocalPort());

    masterClient.deleteByQuery("*:*");
    for (int i = 0; i < 10; i++)
      index(masterClient, "id", i, "name", "name = " + i);

    masterClient.commit();

    NamedList masterQueryRsp = rQuery(10, "*:*", masterClient);
    SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response");
    assertEquals(10, masterQueryResult.getNumFound());

    slave.setTestPort(masterJetty.getLocalPort());
    slave.copyConfigFile(slave.getSolrConfigFile(), "solrconfig.xml");

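TestReplicationHandler above also changes lifecycle scope: the static @BeforeClass/@AfterClass pair becomes per-test @Before/@After, and the Jetty/client fields lose their static modifier. A generic sketch of that idiom follows, with Process as a deliberately simplified stand-in for the Jetty runners and Solr clients (the command names are placeholders, not anything from this commit):

import org.junit.After;
import org.junit.Before;

public class ExamplePerTestLifecycle {

  // non-static: each test method gets its own pair
  private Process master;
  private Process slave;

  @Before
  public void setup() throws Exception {
    // fresh instances per test, so a test that restarts or reconfigures
    // the master cannot leak state into the next test
    master = new ProcessBuilder("solr-master-stub").start();
    slave = new ProcessBuilder("solr-slave-stub").start();
  }

  @After
  public void tearDown() {
    master.destroy();
    slave.destroy();
  }
}
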
@@ -17,28 +17,24 @@

package org.apache.solr.handler.admin;

import java.util.Arrays;
import java.util.EnumSet;

import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.EnumSet;
import java.util.Arrays;

/**
 * :TODO: currently only tests some of the utilities in the LukeRequestHandler
 */
public class LukeRequestHandlerTest extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() {
    return "schema12.xml";
  }

  @Override
  public String getSolrConfigFile() {
    return "solrconfig.xml";
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema12.xml");
  }

  @Before

@@ -16,25 +16,23 @@ package org.apache.solr.handler.component;
 * limitations under the License.
 */

import java.util.Date;
import java.util.Locale;
import java.util.Map;
import java.util.HashMap;
import java.util.TimeZone;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.StatsParams;

import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;

import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;


/**

@@ -42,19 +40,15 @@ import org.apache.solr.util.AbstractSolrTestCase;
 */
public class StatsComponentTest extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() {
    return "schema11.xml";
  }

  @Override
  public String getSolrConfigFile() {
    return "solrconfig.xml";
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema11.xml");
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    clearIndex();
    lrf = h.getRequestFactory("standard", 0, 20);
  }


@@ -16,16 +16,19 @@
 */
package org.apache.solr.highlight;

import java.util.HashMap;

import org.apache.solr.handler.component.HighlightComponent;
import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.util.TestHarness;

import java.util.HashMap;
import org.junit.BeforeClass;

public class HighlighterConfigTest extends AbstractSolrTestCase {
  @Override public String getSchemaFile() { return "schema.xml"; }
  // the default case (i.e. <highlight> without a class attribute) is tested every time sorlconfig.xml is used
  @Override public String getSolrConfigFile() { return "solrconfig-highlight.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-highlight.xml", "schema.xml");
  }

  @Override
  public void setUp() throws Exception {

@@ -16,23 +16,23 @@
 */
package org.apache.solr.request;

import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.response.BinaryQueryResponseWriter;
import org.apache.solr.response.BinaryResponseWriter.Resolver;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.ReturnFields;

import org.apache.solr.util.AbstractSolrTestCase;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Locale;
import java.util.UUID;

import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.response.BinaryQueryResponseWriter;
import org.apache.solr.response.BinaryResponseWriter.Resolver;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

/**
 * Test for BinaryResponseWriter
 *

@@ -41,14 +41,10 @@ import java.util.UUID;
 */
public class TestBinaryResponseWriter extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() {
    return "schema12.xml";
  }

  @Override
  public String getSolrConfigFile() {
    return "solrconfig.xml";

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema12.xml");
  }

  /**

@@ -96,7 +92,7 @@ public class TestBinaryResponseWriter extends AbstractSolrTestCase {
    assertTrue("ddd_s not found", out.getFieldNames().contains("ddd_s"));
    assertEquals("Wrong number of fields found",
        2, out.getFieldNames().size());

    req.close();

  }

@@ -17,30 +17,34 @@

package org.apache.solr.request;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;

import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.impl.BinaryResponseParser;
import org.apache.solr.client.solrj.impl.XMLResponseParser;
import org.apache.solr.response.BinaryQueryResponseWriter;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.impl.BinaryResponseParser;
import org.apache.solr.client.solrj.impl.XMLResponseParser;

import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.io.*;


public class TestWriterPerf extends AbstractSolrTestCase {

  public static final Logger log
      = LoggerFactory.getLogger(TestWriterPerf.class);

  @Override
  public String getSchemaFile() { return "schema11.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig-functionquery.xml"; }
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-functionquery.xml", "schema11.xml");
  }

  public String getCoreName() { return "basic"; }

  @Override

@@ -19,13 +19,16 @@ package org.apache.solr.search;

import org.apache.solr.common.SolrException;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestExtendedDismaxParser extends AbstractSolrTestCase {
  @Override
  public String getSchemaFile() { return "schema12.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema12.xml");
  }

  // public String getCoreName() { return "collection1"; }

  @Override

@@ -18,13 +18,15 @@ package org.apache.solr.search;

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

public class TestQueryTypes extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schema11.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema11.xml");
  }

  public String getCoreName() { return "basic"; }


@@ -22,7 +22,9 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Query;
import org.apache.lucene.index.Term;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

import java.util.List;

@@ -31,10 +33,11 @@ import java.util.List;
 */
public class TestQueryUtils extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schema.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }


  @Override
  public void setUp() throws Exception {

@@ -19,6 +19,7 @@ package org.apache.solr.search;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;

@@ -26,6 +27,7 @@ import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.common.SolrInputDocument;
import org.junit.BeforeClass;

import java.util.*;
import java.io.IOException;

@@ -35,10 +37,12 @@ import java.io.IOException;
 */
public class TestSearchPerf extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schema11.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema11.xml");
  }


  @Override
  public void setUp() throws Exception {

@@ -19,13 +19,14 @@ package org.apache.solr.search;
 */

import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

public class TestSurroundQueryParser extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schemasurround.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schemasurround.xml");
  }
  // public String getCoreName() { return "collection1"; }

  @Override

@@ -17,6 +17,7 @@ package org.apache.solr.search.function;
 */

import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;


/**

@@ -24,14 +25,18 @@ import org.apache.solr.util.AbstractSolrTestCase;
 *
 **/
public class SortByFunctionTest extends AbstractSolrTestCase {
  @Override
  public String getSchemaFile() {
    return "schema.xml";


  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }


  @Override
  public String getSolrConfigFile() {
    return "solrconfig.xml";
  public void setUp() throws Exception {
    super.setUp();

  }

  public void test() throws Exception {

@@ -19,15 +19,19 @@ package org.apache.solr.servlet;

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;



public class DirectSolrConnectionTest extends AbstractSolrTestCase
{
  @Override
  public String getSchemaFile() { return "solr/crazy-path-to-schema.xml"; }
  @Override
  public String getSolrConfigFile() { return "solr/crazy-path-to-config.xml"; }


  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solr/crazy-path-to-config.xml", "solr/crazy-path-to-schema.xml");
  }


  DirectSolrConnection direct;


@@ -34,6 +34,7 @@ import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.util.RefCounted;
import org.junit.BeforeClass;

class NewSearcherListener implements SolrEventListener {

@@ -108,10 +109,10 @@ class NewSearcherListener implements SolrEventListener {
@Slow
public class AutoCommitTest extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schema.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }

  public static void verbose(Object... args) {
    if (!VERBOSE) return;

@@ -125,6 +126,14 @@ public class AutoCommitTest extends AbstractSolrTestCase {
    log.info(sb.toString());
    // System.out.println(sb.toString());
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    clearIndex();
    // reload the core to clear stats
    h.getCoreContainer().reload(h.getCore().getName());
  }

  /**
   * Take a string and make it an iterable ContentStream

@@ -141,8 +150,8 @@ public class AutoCommitTest extends AbstractSolrTestCase {
  }

  public void testMaxDocs() throws Exception {

    SolrCore core = h.getCore();

    NewSearcherListener trigger = new NewSearcherListener();

    DirectUpdateHandler2 updateHandler = (DirectUpdateHandler2)core.getUpdateHandler();

@@ -190,6 +199,7 @@ public class AutoCommitTest extends AbstractSolrTestCase {

  public void testMaxTime() throws Exception {
    SolrCore core = h.getCore();

    NewSearcherListener trigger = new NewSearcherListener();
    core.registerNewSearcherListener(trigger);
    DirectUpdateHandler2 updater = (DirectUpdateHandler2) core.getUpdateHandler();

@@ -263,6 +273,7 @@ public class AutoCommitTest extends AbstractSolrTestCase {

  public void testCommitWithin() throws Exception {
    SolrCore core = h.getCore();

    NewSearcherListener trigger = new NewSearcherListener();
    core.registerNewSearcherListener(trigger);
    DirectUpdateHandler2 updater = (DirectUpdateHandler2) core.getUpdateHandler();

@@ -16,15 +16,15 @@ package org.apache.solr.update;
 * limitations under the License.
 */

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import java.io.File;
import java.io.FileFilter;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.util.AbstractSolrTestCase;

import java.io.File;
import java.io.FileFilter;
import org.junit.BeforeClass;


/**

@@ -33,15 +33,9 @@ import java.io.FileFilter;
 **/
public class DirectUpdateHandlerOptimizeTest extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() {
    return "schema12.xml";
  }

  @Override
  public String getSolrConfigFile() {
    // return "solrconfig-duh-optimize.xml";
    return "solrconfig.xml";
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema12.xml");
  }



@@ -40,7 +40,6 @@ import org.junit.Test;
 */
public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {

  // TODO: fix this test to not require FSDirectory
  static String savedFactory;
  @BeforeClass
  public static void beforeClass() throws Exception {

@@ -32,6 +32,7 @@ import org.apache.solr.core.SolrEventListener;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.Before;
import org.junit.BeforeClass;

/**
 * Test auto commit functionality in a way that doesn't suck.

@@ -55,10 +56,11 @@ import org.junit.Before;
@Slow
public class SoftAutoCommitTest extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schema.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }

  private MockEventListener monitor;
  private DirectUpdateHandler2 updater;

@@ -77,6 +79,13 @@ public class SoftAutoCommitTest extends AbstractSolrTestCase {
    updater.registerSoftCommitCallback(monitor);
    updater.registerCommitCallback(monitor);
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    // reset stats
    h.getCoreContainer().reload("collection1");
  }

  public void testSoftAndHardCommitMaxTimeMixedAdds() throws Exception {

@@ -179,7 +188,7 @@ public class SoftAutoCommitTest extends AbstractSolrTestCase {
  }

  public void testSoftAndHardCommitMaxTimeDelete() throws Exception {


    final int softCommitWaitMillis = 500;
    final int hardCommitWaitMillis = 1200;


@@ -17,6 +17,7 @@

package org.apache.solr.update;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;

@@ -41,9 +42,11 @@ public class TestIndexingPerformance extends AbstractSolrTestCase {
  // TODO: fix this test to not require FSDirectory
  static String savedFactory;
  @BeforeClass
  public static void beforeClass() {
  public static void beforeClass() throws Exception {
    savedFactory = System.getProperty("solr.DirectoryFactory");
    System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockFSDirectoryFactory");

    initCore("solrconfig_perf.xml", "schema12.xml");
  }
  @AfterClass
  public static void afterClass() {

@@ -56,11 +59,7 @@ public class TestIndexingPerformance extends AbstractSolrTestCase {

  public static final Logger log
      = LoggerFactory.getLogger(TestIndexingPerformance.class);

  @Override
  public String getSchemaFile() { return "schema12.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig_perf.xml"; }


  public void testIndexingPerf() throws IOException {
    int iter=1000;

@@ -21,21 +21,22 @@ import java.util.HashMap;

import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.core.*;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;



public class UpdateParamsTest extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schema.xml"; }
  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }

  /**
   * Tests that only update.chain and not update.processor works (SOLR-2105)
   */

@@ -17,23 +17,24 @@

package org.apache.solr.update.processor;

import org.apache.solr.core.SolrCore;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.update.processor.CustomUpdateRequestProcessor;
import org.apache.solr.util.AbstractSolrTestCase;
import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;

import java.util.Arrays;

import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
import org.apache.solr.core.SolrCore;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

/**
 *
 */
public class UpdateRequestProcessorFactoryTest extends AbstractSolrTestCase {

  @Override public String getSchemaFile() { return "schema.xml"; }
  @Override public String getSolrConfigFile() { return "solrconfig-transformers.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-transformers.xml", "schema.xml");
  }


  public void testConfiguration() throws Exception

@@ -70,6 +70,7 @@ public class ClusterState implements JSONWriter.Writable {
   */
  public ClusterState(Integer zkClusterStateVersion, Set<String> liveNodes,
      Map<String, Map<String,Slice>> collectionStates) {
    this.zkClusterStateVersion = zkClusterStateVersion;
    this.liveNodes = new HashSet<String>(liveNodes.size());
    this.liveNodes.addAll(liveNodes);
    this.collectionStates = new HashMap<String, Map<String,Slice>>(collectionStates.size());

@@ -197,9 +197,13 @@ public class ZkStateReader {
          Stat stat = new Stat();
          byte[] data = zkClient.getData(CLUSTER_STATE, thisWatch, stat ,
              true);

          ClusterState clusterState = ClusterState.load(stat.getVersion(), data,
              ZkStateReader.this.clusterState.getLiveNodes());
          List<String> liveNodes = zkClient.getChildren(
              LIVE_NODES_ZKNODE, this, true);

          Set<String> liveNodesSet = new HashSet<String>();
          liveNodesSet.addAll(liveNodes);
          Set<String> ln = ZkStateReader.this.clusterState.getLiveNodes();
          ClusterState clusterState = ClusterState.load(stat.getVersion(), data, ln);
          // update volatile
          ZkStateReader.this.clusterState = clusterState;
        }

@@ -301,9 +305,9 @@ public class ZkStateReader {
                  ZkStateReader.this.clusterState.getZkClusterStateVersion(), liveNodesSet,
                  ZkStateReader.this.clusterState.getCollectionStates());
            }
            this.clusterState = clusterState;
          }

          this.clusterState = clusterState;
        } else {
          if (clusterStateUpdateScheduled) {
            log.info("Cloud state update for ZooKeeper already scheduled");

@@ -330,7 +334,7 @@ public class ZkStateReader {
            clusterState = ClusterState.load(zkClient, liveNodesSet);
          } else {
            log.info("Updating live nodes from ZooKeeper... ");
            clusterState = new ClusterState(ZkStateReader.this.clusterState .getZkClusterStateVersion(), liveNodesSet, ZkStateReader.this.clusterState.getCollectionStates());
            clusterState = new ClusterState(ZkStateReader.this.clusterState.getZkClusterStateVersion(), liveNodesSet, ZkStateReader.this.clusterState.getCollectionStates());
          }

          ZkStateReader.this.clusterState = clusterState;

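The ZkStateReader hunks above share one pattern: assemble a complete new ClusterState from fresh ZooKeeper reads (state data plus the live-nodes children), then publish it with a single write to a volatile field. A self-contained sketch of that read-copy-publish idiom follows; ClusterSnapshot is a stand-in for Solr's ClusterState, not the real class.

import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ExampleStateReader {

  static final class ClusterSnapshot {
    final int version;
    final Set<String> liveNodes;
    ClusterSnapshot(int version, Set<String> liveNodes) {
      this.version = version;
      this.liveNodes = new HashSet<String>(liveNodes); // defensive copy
    }
  }

  private volatile ClusterSnapshot clusterState =
      new ClusterSnapshot(0, new HashSet<String>());

  // Called from the watcher thread when ZooKeeper reports a change.
  void onClusterStateChanged(int newVersion, List<String> liveNodesFromZk) {
    Set<String> liveNodes = new HashSet<String>(liveNodesFromZk);
    ClusterSnapshot next = new ClusterSnapshot(newVersion, liveNodes);
    clusterState = next; // update volatile: readers flip atomically
  }

  ClusterSnapshot getClusterState() {
    return clusterState; // always a complete, consistent snapshot
  }
}

Because every snapshot is immutable and swapped in with one volatile write, readers never observe a half-updated live-nodes list.
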
@@ -20,6 +20,7 @@ package org.apache.solr.client.solrj;
import java.io.File;
import java.io.IOException;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;

@@ -28,6 +29,8 @@ import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.ExternalPaths;
import org.junit.AfterClass;
import org.junit.BeforeClass;

/**
 * Abstract base class for testing merge indexes command

@@ -36,7 +39,6 @@ import org.apache.solr.util.ExternalPaths;
 *
 */
public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {
  // protected static final CoreContainer cores = new CoreContainer();
  protected static CoreContainer cores;
  private String saveProp;
  private File dataDir2;

@@ -46,28 +48,30 @@ public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {
    return ExternalPaths.EXAMPLE_MULTICORE_HOME;
  }

  @Override
  public String getSchemaFile() {
    return getSolrHome() + "/core0/conf/schema.xml";
  @BeforeClass
  public static void beforeClass2() throws Exception {
    if (dataDir == null) {
      createTempDir();
    }
    cores = new CoreContainer();
  }

  @Override
  public String getSolrConfigFile() {
    return getSolrHome() + "/core0/conf/solrconfig.xml";

  @AfterClass
  public static void afterClass() {
    cores.shutdown();
  }


  @Override
  public void setUp() throws Exception {
    saveProp = System.getProperty("solr.directoryFactory");
    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
    super.setUp();

    cores = h.getCoreContainer();
    SolrCore.log.info("CORES=" + cores + " : " + cores.getCoreNames());
    cores.setPersistent(false);

    // setup datadirs
    System.setProperty( "solr.core0.data.dir", this.dataDir.getCanonicalPath() );
    System.setProperty( "solr.core0.data.dir", SolrTestCaseJ4.dataDir.getCanonicalPath() );

    dataDir2 = new File(TEMP_DIR, getClass().getName() + "-"
        + System.currentTimeMillis());

@@ -82,10 +86,10 @@ public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {

    String skip = System.getProperty("solr.test.leavedatadir");
    if (null != skip && 0 != skip.trim().length()) {
      System.err.println("NOTE: per solr.test.leavedatadir, dataDir will not be removed: " + dataDir.getAbsolutePath());
      System.err.println("NOTE: per solr.test.leavedatadir, dataDir will not be removed: " + dataDir2.getAbsolutePath());
    } else {
      if (!recurseDelete(dataDir)) {
        System.err.println("!!!! WARNING: best effort to remove " + dataDir.getAbsolutePath() + " FAILED !!!!!");
      if (!recurseDelete(dataDir2)) {
        System.err.println("!!!! WARNING: best effort to remove " + dataDir2.getAbsolutePath() + " FAILED !!!!!");
      }
    }

@@ -19,16 +19,19 @@ package org.apache.solr.client.solrj;

import java.io.File;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION;
import org.apache.solr.client.solrj.response.CoreAdminResponse;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.ExternalPaths;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;


@@ -38,18 +41,26 @@ import org.junit.Test;
 */
public abstract class MultiCoreExampleTestBase extends SolrExampleTestBase
{
  // protected static final CoreContainer cores = new CoreContainer();
  protected CoreContainer cores;
  protected static CoreContainer cores;

  private File dataDir2;

  @Override public String getSolrHome() { return ExternalPaths.EXAMPLE_MULTICORE_HOME; }


  @Override public String getSchemaFile() { return getSolrHome()+"/core0/conf/schema.xml"; }
  @Override public String getSolrConfigFile() { return getSolrHome()+"/core0/conf/solrconfig.xml"; }
  @BeforeClass
  public static void beforeThisClass2() throws Exception {
    cores = new CoreContainer();
  }

  @AfterClass
  public static void afterClass() {
    cores.shutdown();
  }

  @Override public void setUp() throws Exception {
    super.setUp();
    cores = h.getCoreContainer();

    SolrCore.log.info("CORES=" + cores + " : " + cores.getCoreNames());
    cores.setPersistent(false);

@@ -57,7 +68,7 @@ public abstract class MultiCoreExampleTestBase extends SolrExampleTestBase
        + System.currentTimeMillis());
    dataDir2.mkdirs();

    System.setProperty( "solr.core0.data.dir", this.dataDir.getCanonicalPath() );
    System.setProperty( "solr.core0.data.dir", SolrTestCaseJ4.dataDir.getCanonicalPath() );
    System.setProperty( "solr.core1.data.dir", this.dataDir2.getCanonicalPath() );
  }

@@ -73,8 +84,6 @@ public abstract class MultiCoreExampleTestBase extends SolrExampleTestBase
        System.err.println("!!!! WARNING: best effort to remove " + dataDir2.getAbsolutePath() + " FAILED !!!!!");
      }
    }

    cores = null;
  }

  @Override

@@ -208,6 +217,12 @@ public abstract class MultiCoreExampleTestBase extends SolrExampleTestBase
    NamedList<Object> response = getSolrCore("corefoo").query(new SolrQuery().setRequestHandler("/admin/system")).getResponse();
    NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
    String indexDir = (String) ((NamedList<Object>) coreInfo.get("directory")).get("index");



    System.out.println( (String) ((NamedList<Object>) coreInfo.get("directory")).get("dirimpl"));


    // test delete index on core
    CoreAdminRequest.unloadCore("corefoo", true, coreadmin);
    File dir = new File(indexDir);

@@ -19,6 +19,7 @@ package org.apache.solr.client.solrj;


import org.apache.solr.util.AbstractSolrTestCase;
import org.junit.BeforeClass;

/**
 * This should include tests against the example solr config

@@ -32,9 +33,13 @@ abstract public class SolrExampleTestBase extends AbstractSolrTestCase
{
  @Override
  public String getSolrHome() { return "../../../example/solr/"; }

  @Override public String getSchemaFile() { return getSolrHome()+"conf/schema.xml"; }
  @Override public String getSolrConfigFile() { return getSolrHome()+"conf/solrconfig.xml"; }

  @BeforeClass
  public static void beforeClass() throws Exception {
    if (dataDir == null) {
      createTempDir();
    }
  }

  @Override
  public void setUp() throws Exception

@@ -56,7 +56,7 @@ public class MultiCoreExampleJettyTest extends MultiCoreExampleTestBase {
    jetty.start(false);
    port = jetty.getLocalPort();

    h.getCoreContainer().setPersistent(false);
    cores.setPersistent(false);
  }

  @Override public void tearDown() throws Exception

@@ -178,6 +178,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
  /** Call initCore in @BeforeClass to instantiate a solr core in your test class.
   * deleteCore will be called for you via SolrTestCaseJ4 @AfterClass */
  public static void initCore(String config, String schema, String solrHome) throws Exception {
    assertNotNull(solrHome);
    configString = config;
    schemaString = schema;
    testSolrHome = solrHome;

@@ -378,6 +379,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
  }

  public static void createCore() {
    assertNotNull(testSolrHome);
    solrConfig = TestHarness.createConfig(testSolrHome, coreName, getSolrConfigFile());
    h = new TestHarness( dataDir.getAbsolutePath(),
        solrConfig,

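The SolrTestCaseJ4 hunks above add assertNotNull guards so a bad solrHome fails immediately in initCore rather than later inside createCore. A hedged usage sketch of the three-argument overload the diff shows; the class name and solr-home path below are illustrative, only the initCore signature comes from the diff.

import org.apache.solr.SolrTestCaseJ4;
import org.junit.BeforeClass;

public class ExampleCustomHomeTest extends SolrTestCaseJ4 {

  @BeforeClass
  public static void beforeClass() throws Exception {
    // solrHome must be non-null; with the change above this is checked
    // up front instead of surfacing as a confusing failure in createCore()
    initCore("solrconfig.xml", "schema.xml", "src/test-files/custom-home");
  }
}
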
@ -44,7 +44,8 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
|
|||
|
||||
@BeforeClass
|
||||
public static void beforeThisClass() throws Exception {
|
||||
useFactory(null);
|
||||
// Only For Manual Testing: this will force an fs based dir factory
|
||||
//useFactory(null);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -32,6 +32,7 @@ import java.util.concurrent.atomic.AtomicInteger;
|
|||
|
||||
import org.apache.http.params.CoreConnectionPNames;
|
||||
import org.apache.lucene.util.LuceneTestCase.Slow;
|
||||
import org.apache.solr.BaseDistributedSearchTestCase.RandVal;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
import org.apache.solr.client.solrj.SolrServer;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
|
@ -51,10 +52,12 @@ import org.apache.solr.common.cloud.ZkCoreNodeProps;
|
|||
import org.apache.solr.common.cloud.ZkNodeProps;
|
||||
import org.apache.solr.common.cloud.ZkStateReader;
|
||||
import org.apache.solr.common.params.ModifiableSolrParams;
|
||||
import org.apache.zookeeper.KeeperException;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -101,9 +104,9 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
protected Map<String,List<CloudJettyRunner>> shardToJetty = new HashMap<String,List<CloudJettyRunner>>();
|
||||
private AtomicInteger jettyIntCntr = new AtomicInteger(0);
|
||||
protected ChaosMonkey chaosMonkey;
|
||||
protected volatile ZkStateReader zkStateReader;
|
||||
|
||||
protected Map<String,CloudJettyRunner> shardToLeaderJetty = new HashMap<String,CloudJettyRunner>();
|
||||
private boolean cloudInit;
|
||||
|
||||
public static class CloudJettyRunner {
|
||||
public JettySolrRunner jetty;
|
||||
|
@ -195,61 +198,28 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
}
|
||||
|
||||
protected void initCloud() throws Exception {
|
||||
if (zkStateReader == null) {
|
||||
synchronized (this) {
|
||||
if (zkStateReader != null) {
|
||||
return;
|
||||
}
|
||||
zkStateReader = new ZkStateReader(zkServer.getZkAddress(), 10000,
|
||||
AbstractZkTestCase.TIMEOUT);
|
||||
|
||||
zkStateReader.createClusterStateWatchersAndUpdate();
|
||||
}
|
||||
assert(cloudInit == false);
|
||||
cloudInit = true;
|
||||
try {
|
||||
CloudSolrServer server = new CloudSolrServer(zkServer.getZkAddress());
|
||||
server.setDefaultCollection(DEFAULT_COLLECTION);
|
||||
server.getLbServer().getHttpClient().getParams()
|
||||
.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 5000);
|
||||
server.getLbServer().getHttpClient().getParams()
|
||||
.setParameter(CoreConnectionPNames.SO_TIMEOUT, 20000);
|
||||
cloudClient = server;
|
||||
|
||||
chaosMonkey = new ChaosMonkey(zkServer, zkStateReader,
|
||||
DEFAULT_COLLECTION, shardToJetty,
|
||||
shardToLeaderJetty);
|
||||
cloudClient.connect();
|
||||
} catch (MalformedURLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
// wait until shards have started registering...
|
||||
int cnt = 30;
|
||||
while (!zkStateReader.getClusterState().getCollections()
|
||||
.contains(DEFAULT_COLLECTION)) {
|
||||
if (cnt == 0) {
|
||||
throw new RuntimeException("timeout waiting for collection1 in cluster state");
|
||||
}
|
||||
cnt--;
|
||||
Thread.sleep(500);
|
||||
}
|
||||
cnt = 30;
|
||||
while (zkStateReader.getClusterState().getSlices(DEFAULT_COLLECTION).size() != sliceCount) {
|
||||
if (cnt == 0) {
|
||||
throw new RuntimeException("timeout waiting for collection shards to come up");
|
||||
}
|
||||
cnt--;
|
||||
Thread.sleep(500);
|
||||
}
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
|
||||
// use the distributed solrj client
|
||||
if (cloudClient == null) {
|
||||
synchronized (this) {
|
||||
if (cloudClient != null) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
CloudSolrServer server = new CloudSolrServer(zkServer.getZkAddress());
|
||||
server.setDefaultCollection(DEFAULT_COLLECTION);
|
||||
server.getLbServer().getHttpClient().getParams()
|
||||
.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 5000);
|
||||
server.getLbServer().getHttpClient().getParams()
|
||||
.setParameter(CoreConnectionPNames.SO_TIMEOUT, 20000);
|
||||
cloudClient = server;
|
||||
} catch (MalformedURLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
chaosMonkey = new ChaosMonkey(zkServer, zkStateReader, DEFAULT_COLLECTION,
|
||||
shardToJetty, shardToLeaderJetty);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected void createServers(int numServers) throws Exception {
|
||||
|
@ -270,8 +240,31 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
}
|
||||
controlClient = createNewSolrServer(controlJetty.getLocalPort());
|
||||
|
||||
initCloud();
|
||||
|
||||
createJettys(numServers, true);
|
||||
|
||||
// wait until shards have started registering...
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
int cnt = 30;
|
||||
while (!zkStateReader.getClusterState().getCollections()
|
||||
.contains(DEFAULT_COLLECTION)) {
|
||||
if (cnt == 0) {
|
||||
throw new RuntimeException(
|
||||
"timeout waiting for collection1 in cluster state");
|
||||
}
|
||||
cnt--;
|
||||
Thread.sleep(500);
|
||||
}
|
||||
cnt = 30;
|
||||
while (zkStateReader.getClusterState().getSlices(DEFAULT_COLLECTION).size() != sliceCount) {
|
||||
if (cnt == 0) {
|
||||
throw new RuntimeException(
|
||||
"timeout waiting for collection shards to come up");
|
||||
}
|
||||
cnt--;
|
||||
Thread.sleep(500);
|
||||
}
|
||||
}
|
||||
|
||||
protected List<JettySolrRunner> createJettys(int numJettys) throws Exception {
|
||||
|
@ -302,9 +295,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
SolrServer client = createNewSolrServer(j.getLocalPort());
|
||||
clients.add(client);
|
||||
}
|
||||
|
||||
initCloud();
|
||||
|
||||
|
||||
this.jettys.addAll(jettys);
|
||||
this.clients.addAll(clients);
|
||||
|
||||
|
@ -324,6 +315,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
Thread.sleep(500);
|
||||
}
|
||||
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
// also make sure we have a leader for each shard
|
||||
for (int i = 1; i <= sliceCount; i++) {
|
||||
zkStateReader.getLeaderProps(DEFAULT_COLLECTION, "shard" + i, 10000);
|
||||
|
@ -346,7 +338,8 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
}
|
||||
|
||||
protected int getNumShards(String defaultCollection) {
|
||||
Map<String,Slice> slices = this.zkStateReader.getClusterState().getSlices(defaultCollection);
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
Map<String,Slice> slices = zkStateReader.getClusterState().getSlices(defaultCollection);
|
||||
int cnt = 0;
|
||||
for (Map.Entry<String,Slice> entry : slices.entrySet()) {
|
||||
cnt += entry.getValue().getReplicasMap().size();
|
||||
|
@ -369,6 +362,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
|
||||
protected void updateMappingsFromZk(List<JettySolrRunner> jettys,
|
||||
List<SolrServer> clients) throws Exception {
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
zkStateReader.updateClusterState(true);
|
||||
cloudJettys.clear();
|
||||
shardToJetty.clear();
|
||||
|
@ -555,11 +549,13 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
|
||||
protected void waitForRecoveriesToFinish(boolean verbose)
|
||||
throws Exception {
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
super.waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, verbose);
|
||||
}
|
||||
|
||||
protected void waitForRecoveriesToFinish(boolean verbose, int timeoutSeconds)
|
||||
throws Exception {
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
super.waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, verbose, true, timeoutSeconds);
|
||||
}
|
||||
|
||||
|
@ -756,7 +752,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
String failMessage = null;
|
||||
if (verbose) System.err.println("check const of " + shard);
|
||||
int cnt = 0;
|
||||
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
assertEquals(
|
||||
"The client count does not match up with the shard count for slice:"
|
||||
+ shard,
|
||||
|
@ -968,7 +964,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
|
||||
long count = 0;
|
||||
String currentState = cjetty.info.getStr(ZkStateReader.STATE_PROP);
|
||||
if (currentState != null
|
||||
|
@ -1167,13 +1163,12 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
|
|||
if (VERBOSE || printLayoutOnTearDown) {
|
||||
super.printLayout();
|
||||
}
|
||||
((HttpSolrServer) controlClient).shutdown();
|
||||
if (controlClient != null) {
|
||||
((HttpSolrServer) controlClient).shutdown();
|
||||
}
|
||||
if (cloudClient != null) {
|
||||
cloudClient.shutdown();
|
||||
}
|
||||
if (zkStateReader != null) {
|
||||
zkStateReader.close();
|
||||
}
|
||||
super.tearDown();
|
||||
|
||||
System.clearProperty("zkHost");
|
||||
|
@@ -1186,7 +1181,9 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
}

protected void destroyServers() throws Exception {
ChaosMonkey.stop(controlJetty);
if (controlJetty != null) {
ChaosMonkey.stop(controlJetty);
}
for (JettySolrRunner jetty : jettys) {
try {
ChaosMonkey.stop(jetty);
@@ -1216,13 +1213,25 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes

protected void waitToSeeNotLive(ZkStateReader zkStateReader,
CloudJettyRunner cjetty) throws InterruptedException {
waitToSeeNotLive(zkStateReader, cjetty, 0);
}

protected void waitToSeeNotLive(ZkStateReader zkStateReader,
CloudJettyRunner cjetty, int cnt) throws InterruptedException {
int tries = 0;
while (zkStateReader.getClusterState()
.liveNodesContain(cjetty.info.getStr(ZkStateReader.NODE_NAME_PROP))) {
if (tries++ == 220) {
fail("Shard still reported as live in zk");
ClusterState clusterState = zkStateReader.getClusterState();
while (clusterState.liveNodesContain(cjetty.info
.getStr(ZkStateReader.NODE_NAME_PROP))) {
System.out.println("scs:"
+ zkStateReader.getClusterState().getZkClusterStateVersion() + " "
+ zkStateReader.getClusterState().getLiveNodes());
System.out.println("see live nodes:"
+ zkStateReader.getClusterState().getLiveNodes());
if (tries++ == 30) {
fail("Shard still reported as live in zk - " + cnt + " jetty");
}
Thread.sleep(1000);

clusterState = zkStateReader.getClusterState();
}
}
}

@@ -189,7 +189,7 @@ public class ChaosMonkey {
}

private static void stopJettySolrRunner(JettySolrRunner jetty) throws Exception {

assert(jetty != null);
monkeyLog("stop shard! " + jetty.getLocalPort());
// get a clean shutdown so that no dirs are left open...
FilterHolder fh = jetty.getDispatchFilter();

@@ -17,7 +17,6 @@ package org.apache.solr.core;
* limitations under the License.
*/

import java.io.File;
import java.io.IOException;

import org.apache.lucene.store.Directory;
@@ -27,7 +26,7 @@ import org.apache.lucene.util.LuceneTestCase;
/**
* Opens a directory with {@link LuceneTestCase#newDirectory()}
*/
public class MockDirectoryFactory extends CachingDirectoryFactory {
public class MockDirectoryFactory extends EphemeralDirectoryFactory {

@Override
protected Directory create(String path) throws IOException {
@@ -41,23 +40,13 @@ public class MockDirectoryFactory extends CachingDirectoryFactory {
if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setAssertNoUnrefencedFilesOnClose(false);
}

// ram dirs in cores that are restarted end up empty
// and check index fails
if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setCheckIndexOnClose(false);
}
return dir;
}

@Override
public boolean exists(String path) {
String fullPath = new File(path).getAbsolutePath();
synchronized (this) {
CacheValue cacheValue = byPathCache.get(fullPath);
Directory directory = null;
if (cacheValue != null) {
directory = cacheValue.directory;
}
if (directory == null) {
return false;
} else {
return true;
}
}
}

}

@@ -27,7 +27,7 @@ import org.apache.lucene.util.LuceneTestCase;
/**
* Opens a directory with {@link LuceneTestCase#newFSDirectory(File)}
*/
public class MockFSDirectoryFactory extends CachingDirectoryFactory {
public class MockFSDirectoryFactory extends StandardDirectoryFactory {

@Override
public Directory create(String path) throws IOException {
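Both mock factories exercise the same extension point: subclass a DirectoryFactory and override create(String). A minimal sketch of a custom factory, assuming the create(String) contract shown above (the class name and the NIOFSDirectory choice are illustrative, not part of this commit):

import java.io.File;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.solr.core.CachingDirectoryFactory;

public class ExampleDirectoryFactory extends CachingDirectoryFactory {
  @Override
  protected Directory create(String path) throws IOException {
    // only the raw Directory construction lives here; the caching and
    // ref-counting behavior is inherited from CachingDirectoryFactory
    return new NIOFSDirectory(new File(path));
  }
}

Such a factory would be selected through the directoryFactory element in solrconfig.xml (or the solr.directoryFactory system property used by the tests below).
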
@@ -19,29 +19,18 @@
package org.apache.solr.util;

import java.io.*;
import java.util.*;
import java.io.File;
import java.util.HashSet;

import javax.xml.xpath.XPathExpressionException;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.QuickPatchThreadsFilter;
import org.apache.solr.SolrIgnoredThreadsFilter;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.*;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.XML;
import org.apache.solr.common.SolrException;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.request.SolrQueryRequest;
import org.junit.*;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;

import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;

/**
* An Abstract base class that makes writing Solr JUnit tests "easier"
@@ -60,39 +49,9 @@ import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
SolrIgnoredThreadsFilter.class,
QuickPatchThreadsFilter.class
})
public abstract class AbstractSolrTestCase extends LuceneTestCase {
public abstract class AbstractSolrTestCase extends SolrTestCaseJ4 {
protected SolrConfig solrConfig;

/**
* Harness initialized by initTestHarness.
*
* <p>
* For use in test methods as needed.
* </p>
*/
protected TestHarness h;

/**
* LocalRequestFactory initialized by initTestHarness using sensible
* defaults.
*
* <p>
* For use in test methods as needed.
* </p>
*/
protected TestHarness.LocalRequestFactory lrf;

/**
* Subclasses must define this method to return the name of the
* schema.xml they wish to use.
*/
public abstract String getSchemaFile();

/**
* Subclasses must define this method to return the name of the
* solrconfig.xml they wish to use.
*/
public abstract String getSolrConfigFile();

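A concrete test only has to name its two config files; a minimal sketch of a subclass (the class name is illustrative, and the file names are assumed to resolve under the test's solr home, as setUp() below wires them):

public class ExampleSolrTest extends AbstractSolrTestCase {
  @Override
  public String getSchemaFile() {
    return "schema.xml";
  }

  @Override
  public String getSolrConfigFile() {
    return "solrconfig.xml";
  }
}
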
/**
* Subclasses can override this to change a test's solr home
@@ -101,69 +60,9 @@ public abstract class AbstractSolrTestCase extends LuceneTestCase {
public String getSolrHome() {
return SolrTestCaseJ4.TEST_HOME();
}

@ClassRule
public static TestRule solrClassRules =
RuleChain.outerRule(new SystemPropertiesRestoreRule())
.around(new RevertDefaultThreadHandlerRule());

@Rule
public TestRule solrTestRules =
RuleChain.outerRule(new SystemPropertiesRestoreRule());

@BeforeClass
public static void beforeClassAbstractSolrTestCase() {
SolrTestCaseJ4.startTrackingSearchers();
}

@AfterClass
public static void afterClassAbstractSolrTestCase() {
SolrTestCaseJ4.endTrackingSearchers();
}
/**
* The directory used to store the index managed by the TestHarness h
*/
protected File dataDir;

public static Logger log = LoggerFactory.getLogger(AbstractSolrTestCase.class);

private String factoryProp;

/**
* Initializes things your test might need
*
* <ul>
* <li>Creates a dataDir in the "java.io.tmpdir"</li>
* <li>initializes the TestHarness h using this data directory, and getSchemaFile()</li>
* <li>initializes the LocalRequestFactory lrf using sensible defaults.</li>
* </ul>
*/
@Override
public void setUp() throws Exception {
super.setUp();
log.info("####SETUP_START " + getTestName());
ignoreException("ignore_exception");
factoryProp = System.getProperty("solr.directoryFactory");
if (factoryProp == null) {
System.setProperty("solr.directoryFactory","solr.RAMDirectoryFactory");
}
dataDir = new File(TEMP_DIR,
getClass().getName() + "-" + System.currentTimeMillis());
dataDir.mkdirs();
String configFile = getSolrConfigFile();
System.setProperty("solr.solr.home", getSolrHome());
if (configFile != null) {

solrConfig = TestHarness.createConfig(getSolrHome(), getSolrConfigFile());
h = new TestHarness( dataDir.getAbsolutePath(),
solrConfig,
getSchemaFile());
lrf = h.getRequestFactory
("standard",0,20,CommonParams.VERSION,"2.2");
}
log.info("####SETUP_END " + getTestName());
}

/** Causes an exception matching the regex pattern to not be logged. */
public static void ignoreException(String pattern) {
@@ -194,145 +93,7 @@ public abstract class AbstractSolrTestCase extends LuceneTestCase {
log.info("####PRETEARDOWN " + getTestName());
}

/**
* Shuts down the test harness, and makes the best attempt possible
* to delete dataDir, unless the system property "solr.test.leavedatadir"
* is set.
*/
@Override
public void tearDown() throws Exception {
log.info("####TEARDOWN_START " + getTestName());
if (factoryProp == null) {
System.clearProperty("solr.directoryFactory");
}

if (h != null) { h.close(); }
String skip = System.getProperty("solr.test.leavedatadir");
if (null != skip && 0 != skip.trim().length()) {
System.err.println("NOTE: per solr.test.leavedatadir, dataDir will not be removed: " + dataDir.getAbsolutePath());
} else {
if (!recurseDelete(dataDir)) {
System.err.println("!!!! WARNING: best effort to remove " + dataDir.getAbsolutePath() + " FAILED !!!!!");
}
}

resetExceptionIgnores();
super.tearDown();
}

/** Validates an update XML String is successful
*/
public void assertU(String update) {
assertU(null, update);
}

/** Validates an update XML String is successful
*/
public void assertU(String message, String update) {
checkUpdateU(message, update, true);
}

/** Validates an update XML String failed
*/
public void assertFailedU(String update) {
assertFailedU(null, update);
}

/** Validates an update XML String failed
*/
public void assertFailedU(String message, String update) {
checkUpdateU(message, update, false);
}

/** Checks the success or failure of an update message
*/
private void checkUpdateU(String message, String update, boolean shouldSucceed) {
try {
String m = (null == message) ? "" : message + " ";
if (shouldSucceed) {
String res = h.validateUpdate(update);
if (res != null) fail(m + "update was not successful: " + res);
} else {
String res = h.validateErrorUpdate(update);
if (res != null) fail(m + "update succeeded, but should have failed: " + res);
}
} catch (SAXException e) {
throw new RuntimeException("Invalid XML", e);
}
}

/** Validates a query matches some XPath test expressions and closes the query */
public void assertQ(SolrQueryRequest req, String... tests) {
assertQ(null, req, tests);
}

/** Validates a query matches some XPath test expressions and closes the query */
public void assertQ(String message, SolrQueryRequest req, String... tests) {
try {
String m = (null == message) ? "" : message + " ";
String response = h.query(req);
String results = h.validateXPath(response, tests);
if (null != results) {
fail(m + "query failed XPath: " + results +
"\n xml response was: " + response +
"\n request was: " + req.getParamString());
}
} catch (XPathExpressionException e1) {
throw new RuntimeException("XPath is invalid", e1);
} catch (Exception e2) {
throw new RuntimeException("Exception during query", e2);
}
}

/** Makes sure a query throws a SolrException with the listed response code */
public void assertQEx(String message, SolrQueryRequest req, int code ) {
try {
h.query(req);
fail( message );
} catch (SolrException sex) {
assertEquals( code, sex.code() );
} catch (Exception e2) {
throw new RuntimeException("Exception during query", e2);
}
}

public void assertQEx(String message, SolrQueryRequest req, SolrException.ErrorCode code ) {
try {
h.query(req);
fail( message );
} catch (SolrException e) {
assertEquals( code.code, e.code() );
} catch (Exception e2) {
throw new RuntimeException("Exception during query", e2);
}
}

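Read together, these helpers give the usual index-then-assert shape of an AbstractSolrTestCase test. A sketch of a typical test method (the field names and queries are illustrative and assume the test schema defines them):

public void testBasicSearch() throws Exception {
  // index one document and commit; assertU fails the test if the
  // update XML is rejected
  assertU(adoc("id", "1", "name", "apache solr"));
  assertU(commit());

  // run a query through the LocalRequestFactory and check XPath
  // expressions against the XML response
  assertQ("should find the doc",
      req("q", "name:solr"),
      "//result[@numFound='1']");

  // a malformed query should surface as a SolrException with the
  // expected error code
  assertQEx("expected syntax error",
      req("q", "name:[broken"),
      SolrException.ErrorCode.BAD_REQUEST);
}
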
/**
* @see TestHarness#optimize
*/
public String optimize(String... args) {
return TestHarness.optimize(args);
}
/**
* @see TestHarness#commit
*/
public String commit(String... args) {
return TestHarness.commit(args);
}

/**
* Generates a simple <add><doc>... XML String with no options
*
* @param fieldsAndValues 0th and even numbered args are field names, odds are field values.
* @see #add
* @see #doc
*/
public String adoc(String... fieldsAndValues) {
Doc d = doc(fieldsAndValues);
return add(d);
}

/**
* Generates a simple <add><doc>... XML String with the
* commitWithin attribute.
@@ -343,53 +104,10 @@ public abstract class AbstractSolrTestCase extends LuceneTestCase {
* @see #doc
*/
public String adoc(int commitWithin, String... fieldsAndValues) {
Doc d = doc(fieldsAndValues);
XmlDoc d = doc(fieldsAndValues);
return add(d, "commitWithin", String.valueOf(commitWithin));
}

/**
* Generates a simple <add><doc>... XML String with no options
*/
public String adoc(SolrInputDocument sdoc) {
List<String> fields = new ArrayList<String>();
for (SolrInputField sf : sdoc) {
for (Object o : sf.getValues()) {
fields.add(sf.getName());
fields.add(o.toString());
}
}
return adoc(fields.toArray(new String[fields.size()]));
}

/**
* Generates an <add><doc>... XML String with options
* on the add.
*
* @param doc the Document to add
* @param args 0th and even numbered args are param names, odds are param values.
* @see #add
* @see #doc
*/
public String add(Doc doc, String... args) {
try {
StringWriter r = new StringWriter();

// this is annoying
if (null == args || 0 == args.length) {
r.write("<add>");
r.write(doc.xml);
r.write("</add>");
} else {
XML.writeUnescapedXML(r, "add", doc.xml, (Object[])args);
}

return r.getBuffer().toString();
} catch (IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}

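For example, the varargs become attributes on the generated <add> element (the values here are illustrative):

// produces <add commitWithin="1000"><doc>...</doc></add>
assertU(add(doc("id", "42", "name", "delayed doc"),
    "commitWithin", "1000"));
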
/**
* Generates a <delete>... XML string for an ID

@@ -408,49 +126,7 @@ public abstract class AbstractSolrTestCase extends LuceneTestCase {
public String delQ(String q, String... args) {
return TestHarness.deleteByQuery(q, args);
}

/**
* Generates a simple <doc>... XML String with no options
*
* @param fieldsAndValues 0th and even numbered args are field names, odds are field values.
* @see TestHarness#makeSimpleDoc
*/
public Doc doc(String... fieldsAndValues) {
Doc d = new Doc();
d.xml = TestHarness.makeSimpleDoc(fieldsAndValues).toString();
return d;
}

/**
* Generates a SolrQueryRequest using the LocalRequestFactory
* @see #lrf
*/
public SolrQueryRequest req(String... q) {
return lrf.makeRequest(q);
}

/**
* Generates a SolrQueryRequest using the LocalRequestFactory
* @see #lrf
*/
public SolrQueryRequest req(String[] params, String... moreParams) {
String[] allParams = moreParams;
if (params.length!=0) {
int len = params.length + moreParams.length;
allParams = new String[len];
System.arraycopy(params,0,allParams,0,params.length);
System.arraycopy(moreParams,0,allParams,params.length,moreParams.length);
}

return lrf.makeRequest(allParams);
}

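The array form simply concatenates the fixed params with the varargs before handing them to the factory; e.g. (parameter values illustrative):

String[] common = new String[] {CommonParams.Q, "*:*"};
// equivalent to req("q", "*:*", "rows", "10", "fl", "id")
SolrQueryRequest request = req(common, "rows", "10", "fl", "id");
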
/** Necessary to make method signatures unambiguous */
public static class Doc {
public String xml;
@Override
public String toString() { return xml; }
}

public static boolean recurseDelete(File f) {
if (f.isDirectory()) {

@@ -72,7 +72,7 @@ import java.util.Map;
*/
public class TestHarness {
String coreName;
protected CoreContainer container;
protected volatile CoreContainer container;
private final ThreadLocal<DocumentBuilder> builderTL = new ThreadLocal<DocumentBuilder>();
private final ThreadLocal<XPath> xpathTL = new ThreadLocal<XPath>();
public UpdateRequestHandler updater;

@@ -124,6 +124,7 @@ public class TestHarness {

public TestHarness(String coreName, CoreContainer.Initializer init) {
try {

container = init.initialize();
if (coreName == null)
coreName = CoreContainer.DEFAULT_DEFAULT_CORE_NAME;