SOLR-3911: write out replication stats through Directory

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1418756 13f79535-47bb-0310-9956-ffa450edef68
Mark Robert Miller 2012-12-08 21:26:55 +00:00
parent 376cf5cd78
commit dffe8d2877
4 changed files with 154 additions and 102 deletions
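
In short, the patch stops persisting replication stats with java.io.File and FileOutputStream and writes them through Lucene's Directory API instead, using two small stream adapters added in this commit. A minimal sketch of the idea, not taken verbatim from the patch (the RAMDirectory, the property key, and the literal file name are illustrative stand-ins):

import java.io.IOException;
import java.io.OutputStream;
import java.util.Properties;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.solr.util.PropertiesOutputStream;

public class ReplicationStatsSketch {
  public static void main(String[] args) throws IOException {
    // Any Directory implementation works; nothing below touches java.io.File.
    Directory dir = new RAMDirectory();

    Properties props = new Properties();
    props.setProperty("timesIndexReplicated", "1"); // example key and value, assumed

    // java.util.Properties only understands java.io streams, so the new
    // PropertiesOutputStream adapter bridges it onto a Lucene IndexOutput.
    IndexOutput out = dir.createOutput("replication.properties", IOContext.DEFAULT);
    OutputStream os = new PropertiesOutputStream(out);
    try {
      props.store(os, "Replication details");
    } finally {
      os.close(); // closing the adapter also closes the underlying IndexOutput
    }

    System.out.println("wrote " + dir.fileLength("replication.properties") + " bytes");
    dir.close();
  }
}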

View File: org/apache/solr/core/SolrCore.java

@@ -17,6 +17,40 @@
package org.apache.solr.core;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.DirectoryReader;
@@ -67,16 +101,17 @@ import org.apache.solr.search.ValueSourceParser;
import org.apache.solr.update.DefaultSolrCoreState;
import org.apache.solr.update.DirectUpdateHandler2;
import org.apache.solr.update.SolrCoreState;
import org.apache.solr.update.SolrCoreState.IndexWriterCloser;
import org.apache.solr.update.SolrIndexWriter;
import org.apache.solr.update.UpdateHandler;
import org.apache.solr.update.VersionInfo;
import org.apache.solr.update.SolrCoreState.IndexWriterCloser;
import org.apache.solr.update.processor.DistributedUpdateProcessorFactory;
import org.apache.solr.update.processor.LogUpdateProcessorFactory;
import org.apache.solr.update.processor.RunUpdateProcessorFactory;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.PropertiesInputStream;
import org.apache.solr.util.RefCounted;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
import org.apache.solr.util.plugin.PluginInfoInitialized;
@@ -85,41 +120,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
/**
*
@@ -239,26 +239,7 @@ public final class SolrCore implements SolrInfoMBean {
if (dir.fileExists("index.properties")){
final IndexInput input = dir.openInput("index.properties", IOContext.DEFAULT);
final InputStream is = new InputStream() {
@Override
public int read() throws IOException {
byte next;
try {
next = input.readByte();
} catch (EOFException e) {
return -1;
}
return next;
}
@Override
public void close() throws IOException {
super.close();
input.close();
}
};
final InputStream is = new PropertiesInputStream(input);
try {
p.load(is);

View File: org/apache/solr/handler/SnapPuller.java

@@ -36,10 +36,7 @@ import static org.apache.solr.handler.ReplicationHandler.NAME;
import static org.apache.solr.handler.ReplicationHandler.OFFSET;
import static org.apache.solr.handler.ReplicationHandler.SIZE;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -99,6 +96,8 @@ import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.solr.util.PropertiesInputStream;
import org.apache.solr.util.PropertiesOutputStream;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -522,9 +521,9 @@ public class SnapPuller {
/**
* Helper method to record the last replication's details so that we can show them on the statistics page across
* restarts.
* @throws IOException on IO error
*/
private void logReplicationTimeAndConfFiles(Collection<Map<String, Object>> modifiedConfFiles, boolean successfulInstall) {
FileOutputStream outFile = null;
private void logReplicationTimeAndConfFiles(Collection<Map<String, Object>> modifiedConfFiles, boolean successfulInstall) throws IOException {
List<String> confFiles = new ArrayList<String>();
if (modifiedConfFiles != null && !modifiedConfFiles.isEmpty())
for (Map<String, Object> map1 : modifiedConfFiles)
@@ -533,7 +532,10 @@
Properties props = replicationHandler.loadReplicationProperties();
long replicationTime = System.currentTimeMillis();
long replicationTimeTaken = (replicationTime - getReplicationStartTime()) / 1000;
Directory dir = null;
try {
dir = solrCore.getDirectoryFactory().get(solrCore.getDataDir(), solrCore.getSolrConfig().indexConfig.lockType);
int indexCount = 1, confFilesCount = 1;
if (props.containsKey(TIMES_INDEX_REPLICATED)) {
indexCount = Integer.valueOf(props.getProperty(TIMES_INDEX_REPLICATED)) + 1;
@@ -563,15 +565,20 @@
sb = readToStringBuffer(replicationTime, props.getProperty(REPLICATION_FAILED_AT_LIST));
props.setProperty(REPLICATION_FAILED_AT_LIST, sb.toString());
}
File f = new File(solrCore.getDataDir(), REPLICATION_PROPERTIES);
outFile = new FileOutputStream(f);
props.store(outFile, "Replication details");
outFile.close();
final IndexOutput out = dir.createOutput(REPLICATION_PROPERTIES, IOContext.DEFAULT);
OutputStream outFile = new PropertiesOutputStream(out);
try {
props.store(outFile, "Replication details");
} finally {
IOUtils.closeQuietly(outFile);
}
} catch (Exception e) {
LOG.warn("Exception while updating statistics", e);
}
finally {
IOUtils.closeQuietly(outFile);
} finally {
if (dir != null) {
solrCore.getDirectoryFactory().release(dir);
}
}
}
@@ -836,26 +843,7 @@
if (dir.fileExists("index.properties")){
final IndexInput input = dir.openInput("index.properties", IOContext.DEFAULT);
final InputStream is = new InputStream() {
@Override
public int read() throws IOException {
byte next;
try {
next = input.readByte();
} catch (EOFException e) {
return -1;
}
return next;
}
@Override
public void close() throws IOException {
super.close();
input.close();
}
};
final InputStream is = new PropertiesInputStream(input);
try {
p.load(is);
} catch (Exception e) {
@@ -873,19 +861,7 @@
p.put("index", tmpIdxDirName);
OutputStream os = null;
try {
os = new OutputStream() {
@Override
public void write(int b) throws IOException {
out.writeByte((byte) b);
}
@Override
public void close() throws IOException {
super.close();
out.close();
}
};
os = new PropertiesOutputStream(out);
p.store(os, "index properties");
} catch (Exception e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
@@ -893,7 +869,7 @@
} finally {
IOUtils.closeQuietly(os);
}
return true;
return true;
} catch (IOException e1) {
throw new RuntimeException(e1);
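
For orientation, the logReplicationTimeAndConfFiles change above boils down to a simple pattern: acquire a Directory from the core's DirectoryFactory, store the stats through the PropertiesOutputStream adapter, and release the Directory again. A hedged paraphrase of that write path (the class, method name, and literal file name below are mine; the DirectoryFactory calls mirror the patched code):

import java.io.IOException;
import java.io.OutputStream;
import java.util.Properties;

import org.apache.commons.io.IOUtils;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.PropertiesOutputStream;

class ReplicationDetailsWriter {
  // Paraphrase of the new write path in SnapPuller.logReplicationTimeAndConfFiles.
  static void writeReplicationDetails(SolrCore core, Properties props) throws IOException {
    // Same DirectoryFactory call the patch uses: data dir plus the configured lock type.
    Directory dir = core.getDirectoryFactory().get(
        core.getDataDir(), core.getSolrConfig().indexConfig.lockType);
    try {
      IndexOutput out = dir.createOutput("replication.properties", IOContext.DEFAULT);
      OutputStream os = new PropertiesOutputStream(out);
      try {
        props.store(os, "Replication details");
      } finally {
        IOUtils.closeQuietly(os); // also closes the IndexOutput
      }
    } finally {
      core.getDirectoryFactory().release(dir); // always hand the Directory back
    }
  }
}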

View File: org/apache/solr/util/PropertiesInputStream.java

@@ -0,0 +1,51 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import org.apache.lucene.store.IndexInput;
public class PropertiesInputStream extends InputStream {
private IndexInput is;
public PropertiesInputStream(IndexInput is) {
this.is = is;
}
@Override
public int read() throws IOException {
byte next;
try {
next = is.readByte();
} catch (EOFException e) {
return -1;
}
return next;
}
@Override
public void close() throws IOException {
super.close();
is.close();
}
}
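
As a usage note, this adapter is what lets SolrCore and SnapPuller feed index.properties from a Lucene Directory into java.util.Properties. A small sketch of that read path (the helper class and method name are mine; the file name and fileExists guard mirror the patched code):

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.solr.util.PropertiesInputStream;

public class PropertiesInputStreamUsage {
  // Loads index.properties from any Directory implementation.
  public static Properties loadIndexProperties(Directory dir) throws IOException {
    Properties p = new Properties();
    if (dir.fileExists("index.properties")) {             // same guard the patch uses
      IndexInput input = dir.openInput("index.properties", IOContext.DEFAULT);
      InputStream is = new PropertiesInputStream(input);  // IndexInput -> InputStream
      try {
        p.load(is);
      } finally {
        is.close();                                       // also closes the IndexInput
      }
    }
    return p;
  }
}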

View File: org/apache/solr/util/PropertiesOutputStream.java

@@ -0,0 +1,44 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.OutputStream;
import org.apache.lucene.store.IndexOutput;
public class PropertiesOutputStream extends OutputStream {
private IndexOutput out;
public PropertiesOutputStream(IndexOutput out) {
this.out = out;
}
@Override
public void write(int b) throws IOException {
out.writeByte((byte) b);
}
@Override
public void close() throws IOException {
super.close();
out.close();
}
}