mirror of https://github.com/apache/lucene.git

SOLR-2551 -- Check dataimport.properties for write access (if delta-import is supported in DIH configuration) before starting an import

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1135954 13f79535-47bb-0310-9956-ffa450edef68

parent f4bfaca2ec
commit 6aaaf9ca94
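In essence, the change makes DIH fail fast when the data config declares a deltaQuery but the dataimport.properties file (where the last index time is persisted) cannot be written. The sketch below condenses that check into a standalone program; it is illustrative only, using a plain RuntimeException in place of DataImportHandlerException and a temporary directory standing in for the core's conf directory.

import java.io.File;
import java.nio.file.Files;

public class PersistFileCheck {

  // Mirrors the check this commit adds to DataImporter: if the properties file
  // already exists it must itself be writable; otherwise its parent directory
  // must be writable so the file can be created when the import finishes.
  static void checkWritable(File persistFile, boolean deltaImportSupported) {
    boolean writable = persistFile.exists()
        ? persistFile.canWrite()
        : persistFile.getParentFile().canWrite();
    if (deltaImportSupported && !writable) {
      throw new RuntimeException(persistFile.getAbsolutePath()
          + " is not writable. Delta imports are supported by data config but will not work.");
    }
  }

  public static void main(String[] args) throws Exception {
    // Hypothetical stand-in for the Solr core's conf directory.
    File confDir = Files.createTempDirectory("conf").toFile();
    File persistFile = new File(confDir, "dataimport.properties");
    checkWritable(persistFile, true); // parent directory is writable, so this passes
    System.out.println("Check passed for " + persistFile.getAbsolutePath());
  }
}

In the actual patch the same logic lives in DataImporter.checkWritablePersistFile(), called before docBuilder.execute() in both the full-import and delta-import paths, as shown in the diff below.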
CHANGES.txt

@@ -14,7 +14,8 @@ $Id$
 
 ================== 3.3.0-dev ==============
 
-(No Changes)
+* SOLR-2551: Check dataimport.properties for write access (if delta-import is supported
+  in DIH configuration) before starting an import (C S, shalin)
 
 ================== 3.2.0 ==================
DataImporter.java

@@ -39,6 +39,7 @@ import org.apache.commons.io.IOUtils;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
+import java.io.File;
 import java.io.StringReader;
 import java.text.SimpleDateFormat;
 import java.util.*;

@@ -84,6 +85,8 @@ public class DataImporter {
 
   private final Map<String , Object> coreScopeSession;
 
+  private boolean isDeltaImportSupported = false;
+
   /**
    * Only for testing purposes
    */

@@ -112,6 +115,8 @@ public class DataImporter {
       initEntity(e, fields, false);
       verifyWithSchema(fields);
       identifyPk(e);
+      if (e.allAttributes.containsKey(SqlEntityProcessor.DELTA_QUERY))
+        isDeltaImportSupported = true;
     }
   }

@@ -349,6 +354,7 @@ public class DataImporter {
 
     try {
       docBuilder = new DocBuilder(this, writer, requestParams);
+      checkWritablePersistFile(writer);
       docBuilder.execute();
       if (!requestParams.debug)
         cumulativeStatistics.add(docBuilder.importStatistics);

@@ -363,6 +369,15 @@ public class DataImporter {
     }
   }
 
+  private void checkWritablePersistFile(SolrWriter writer) {
+    File persistFile = writer.getPersistFile();
+    boolean isWritable = persistFile.exists() ? persistFile.canWrite() : persistFile.getParentFile().canWrite();
+    if (isDeltaImportSupported && !isWritable) {
+      throw new DataImportHandlerException(SEVERE, persistFile.getAbsolutePath() +
+          " is not writable. Delta imports are supported by data config but will not work.");
+    }
+  }
+
   public void doDeltaImport(SolrWriter writer, RequestParams requestParams) {
     LOG.info("Starting Delta Import");
     setStatus(Status.RUNNING_DELTA_DUMP);

@@ -370,6 +385,7 @@ public class DataImporter {
     try {
       setIndexStartTime(new Date());
       docBuilder = new DocBuilder(this, writer, requestParams);
+      checkWritablePersistFile(writer);
       docBuilder.execute();
       if (!requestParams.debug)
         cumulativeStatistics.add(docBuilder.importStatistics);
SolrWriter.java

@@ -99,13 +99,10 @@ public class SolrWriter {
 
     try {
       props.putAll(p);
-      String filePath = configDir;
-      if (configDir != null && !configDir.endsWith(File.separator))
-        filePath += File.separator;
-      filePath += persistFilename;
-      propOutput = new FileOutputStream(filePath);
+      File persistFile = getPersistFile();
+      propOutput = new FileOutputStream(persistFile);
       props.store(propOutput, null);
-      log.info("Wrote last indexed time to " + persistFilename);
+      log.info("Wrote last indexed time to " + persistFile.getAbsolutePath());
     } catch (FileNotFoundException e) {
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
           "Unable to persist Index Start Time", e);

@@ -122,6 +119,14 @@ public class SolrWriter {
     }
   }
 
+  File getPersistFile() {
+    String filePath = configDir;
+    if (configDir != null && !configDir.endsWith(File.separator))
+      filePath += File.separator;
+    filePath += persistFilename;
+    return new File(filePath);
+  }
+
   void finish() {
     try {
       processor.finish();
TestSqlEntityProcessorDelta.java

@@ -20,6 +20,8 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.io.File;
+import java.io.FileOutputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;

@@ -93,6 +95,36 @@ public class TestSqlEntityProcessorDelta extends AbstractDataImportHandlerTestCase
     add1document();
   }
 
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testNonWritablePersistFile() throws Exception {
+    // See SOLR-2551
+    String configDir = h.getCore().getResourceLoader().getConfigDir();
+    String filePath = configDir;
+    if (configDir != null && !configDir.endsWith(File.separator))
+      filePath += File.separator;
+    filePath += "dataimport.properties";
+    File f = new File(filePath);
+    // execute the test only if we are able to set file to read only mode
+    if ((f.exists() || f.createNewFile()) && f.setReadOnly()) {
+      try {
+        List parentRow = new ArrayList();
+        parentRow.add(createMap("id", "1"));
+        MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator());
+
+        List childRow = new ArrayList();
+        childRow.add(createMap("desc", "hello"));
+        MockDataSource.setIterator("select * from y where y.A='1'", childRow
+            .iterator());
+
+        runFullImport(dataConfig_delta);
+        assertQ(req("id:1"), "//*[@numFound='0']");
+      } finally {
+        f.setWritable(true);
+      }
+    }
+  }
+
   // WORKS
 
   @Test