mirror of https://github.com/apache/lucene.git
SOLR-9621: Remove several Guava & Apache Commons calls in favor of java 8 alternatives.
parent 3b49705c43
commit 2e21511cd3
@@ -34,3 +34,23 @@ java.util.logging.**
 @defaultMessage Use RTimer/TimeOut/System.nanoTime for time comparisons, and `new Date()` output/debugging/stats of timestamps. If for some miscellaneous reason, you absolutely need to use this, use a SuppressForbidden.
 java.lang.System#currentTimeMillis()
 
+@defaultMessage Use corresponding Java 8 functional/streaming interfaces
+com.google.common.base.Function
+com.google.common.base.Joiner
+com.google.common.base.Predicate
+com.google.common.base.Supplier
+
+@defaultMessage Use java.nio.charset.StandardCharsets instead
+com.google.common.base.Charsets
+org.apache.commons.codec.Charsets
+
+@defaultMessage Use methods in java.util.Objects instead
+com.google.common.base.Objects#equal(java.lang.Object,java.lang.Object)
+com.google.common.base.Objects#hashCode(java.lang.Object[])
+com.google.common.base.Preconditions#checkNotNull(java.lang.Object)
+com.google.common.base.Preconditions#checkNotNull(java.lang.Object,java.lang.Object)
+
+@defaultMessage Use methods in java.util.Comparator instead
+com.google.common.collect.Ordering
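For orientation, a small self-contained sketch (not part of this commit; all names and data are illustrative) of how the newly forbidden Guava types map onto their Java 8 counterparts:

    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Function;
    import java.util.function.Predicate;
    import java.util.function.Supplier;
    import java.util.stream.Collectors;

    public class Java8Replacements {
      public static void main(String[] args) {
        // com.google.common.base.Supplier -> java.util.function.Supplier
        Supplier<List<String>> listSupplier = () -> Arrays.asList("a", "b", "c");

        // com.google.common.base.Predicate#apply -> java.util.function.Predicate#test
        Predicate<String> nonEmpty = s -> !s.isEmpty();

        // com.google.common.base.Function -> java.util.function.Function (or a method reference)
        Function<String, Integer> length = String::length;

        // com.google.common.base.Joiner.on(",").join(list) -> Collectors.joining(",")
        String joined = listSupplier.get().stream()
            .filter(nonEmpty)
            .map(length)
            .map(Object::toString)
            .collect(Collectors.joining(","));

        System.out.println(joined); // 1,1,1
      }
    }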
@@ -63,6 +63,28 @@ Optimizations
 creation of a Lucene FieldType every time a field is indexed. (John Call, yonik)
 
 
+==================  6.4.0 ==================
+
+Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
+
+Versions of Major Components
+---------------------
+Apache Tika 1.13
+Carrot2 3.12.0
+Velocity 1.7 and Velocity Tools 2.0
+Apache UIMA 2.3.1
+Apache ZooKeeper 3.4.6
+Jetty 9.3.8.v20160314
+
+Detailed Change List
+----------------------
+
+Other Changes
+----------------------
+
+* SOLR-9621: Remove several Guava & Apache Commons calls in favor of java 8 alternatives.
+  (Michael Braun via David Smiley)
+
 ==================  6.3.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -23,8 +23,8 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Set;
+import java.util.function.Supplier;
 
-import com.google.common.base.Supplier;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.solr.analytics.expression.Expression;
 import org.apache.solr.analytics.expression.ExpressionFactory;
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
+import java.util.function.Supplier;
 
-import com.google.common.base.Supplier;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
 import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
@@ -25,6 +25,7 @@ import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.stream.Collectors;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -52,7 +53,6 @@ import org.kitesdk.morphline.base.Notifications;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import com.google.common.annotations.Beta;
-import com.google.common.base.Joiner;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 
@@ -109,7 +109,8 @@ public final class MorphlineMapRunner {
 for (Map.Entry<String,String> entry : configuration) {
 map.put(entry.getKey(), entry.getValue());
 }
-LOG.trace("Configuration:\n{}", Joiner.on("\n").join(map.entrySet()));
+LOG.trace("Configuration:\n" +
+map.entrySet().stream().map(Object::toString).collect(Collectors.joining("\n")));
 }
 
 FaultTolerance faultTolerance = new FaultTolerance(
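As a side note, a minimal sketch of the logging idiom adopted above — joining a map's entries with a stream instead of Guava's Joiner; the map contents are invented:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class JoinMapEntries {
      public static void main(String[] args) {
        Map<String, String> map = new LinkedHashMap<>();
        map.put("fs.defaultFS", "hdfs://localhost:9000");
        map.put("mapreduce.framework.name", "yarn");

        // Guava style: Joiner.on("\n").join(map.entrySet())
        // Java 8 style: render each entry via Object#toString and join with newlines
        String joined = map.entrySet().stream()
            .map(Object::toString)
            .collect(Collectors.joining("\n"));

        System.out.println(joined);
        // fs.defaultFS=hdfs://localhost:9000
        // mapreduce.framework.name=yarn
      }
    }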
@@ -27,10 +27,9 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.TreeMap;
+import java.util.Objects;
+import java.util.stream.Collectors;
 
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.io.Closeables;
@@ -114,9 +113,12 @@ public final class SolrCellBuilder implements CommandBuilder {
 Config solrLocatorConfig = getConfigs().getConfig(config, "solrLocator");
 SolrLocator locator = new SolrLocator(solrLocatorConfig, context);
 LOG.debug("solrLocator: {}", locator);
-this.schema = locator.getIndexSchema();
-Preconditions.checkNotNull(schema);
-LOG.trace("Solr schema: \n{}", Joiner.on("\n").join(new TreeMap<>(schema.getFields()).values()));
+this.schema = Objects.requireNonNull(locator.getIndexSchema());
+if (LOG.isTraceEnabled()) {
+LOG.trace("Solr schema: \n" + schema.getFields().entrySet().stream()
+.sorted(Map.Entry.comparingByKey()).map(Map.Entry::getValue).map(Object::toString)
+.collect(Collectors.joining("\n")));
+}
 
 ListMultimap<String, String> cellParams = ArrayListMultimap.create();
 String uprefix = getConfigs().getString(config, ExtractingParams.UNKNOWN_FIELD_PREFIX, null);
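A hedged sketch of the sorting idiom used in the new trace logging: rather than copying the field map into a TreeMap, the entries are sorted on the fly with Map.Entry.comparingByKey(); the field names below are made up:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class SortedValuesByKey {
      public static void main(String[] args) {
        Map<String, String> fields = new HashMap<>();
        fields.put("title", "type=text_general");
        fields.put("id", "type=string");
        fields.put("price", "type=pfloat");

        // Old: new TreeMap<>(fields).values() — allocates a sorted copy of the whole map.
        // New: sort the entries on the fly and keep only the values.
        String report = fields.entrySet().stream()
            .sorted(Map.Entry.comparingByKey())
            .map(Map.Entry::getValue)
            .collect(Collectors.joining("\n"));

        System.out.println(report);
        // type=string
        // type=pfloat
        // type=text_general
      }
    }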
@@ -20,7 +20,8 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
-import java.util.TreeMap;
+import java.util.Objects;
+import java.util.stream.Collectors;
 
 import org.apache.solr.schema.IndexSchema;
 
@@ -29,8 +30,6 @@ import org.kitesdk.morphline.api.CommandBuilder;
 import org.kitesdk.morphline.api.MorphlineContext;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.AbstractCommand;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
 import com.typesafe.config.Config;
 
 /**
@@ -68,9 +67,13 @@ public final class SanitizeUnknownSolrFieldsBuilder implements CommandBuilder {
 Config solrLocatorConfig = getConfigs().getConfig(config, "solrLocator");
 SolrLocator locator = new SolrLocator(solrLocatorConfig, context);
 LOG.debug("solrLocator: {}", locator);
-this.schema = locator.getIndexSchema();
-Preconditions.checkNotNull(schema);
-LOG.trace("Solr schema: \n{}", Joiner.on("\n").join(new TreeMap(schema.getFields()).values()));
+this.schema = Objects.requireNonNull(locator.getIndexSchema());
+if (LOG.isTraceEnabled()) {
+LOG.trace("Solr schema: \n" +
+schema.getFields().entrySet().stream().sorted(Map.Entry.comparingByKey())
+.map(Map.Entry::getValue).map(Object::toString).collect(Collectors.joining("\n"))
+);
+}
 
 String str = getConfigs().getString(config, "renameToPrefix", "").trim();
 this.renameToPrefix = str.length() > 0 ? str : null;
@@ -21,8 +21,8 @@ import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Paths;
+import java.util.Objects;
 
-import com.google.common.base.Preconditions;
 import com.google.common.io.Files;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
@@ -64,8 +64,7 @@ public class SolrLocator {
 private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
 protected SolrLocator(MorphlineContext context) {
-Preconditions.checkNotNull(context);
-this.context = context;
+this.context = Objects.requireNonNull(context);
 }
 
 public SolrLocator(Config config, MorphlineContext context) {
@@ -21,6 +21,7 @@ import java.io.Reader;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -35,7 +36,6 @@ import org.kitesdk.morphline.api.MorphlineContext;
 import org.kitesdk.morphline.api.MorphlineRuntimeException;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.AbstractCommand;
-import com.google.common.base.Preconditions;
 import com.typesafe.config.Config;
 
 /**
@@ -79,11 +79,9 @@ public final class TokenizeTextBuilder implements CommandBuilder {
 if (fieldType == null) {
 throw new MorphlineCompilationException("Missing Solr field type in schema.xml for name: " + solrFieldType, config);
 }
-this.analyzer = fieldType.getIndexAnalyzer();
-Preconditions.checkNotNull(analyzer);
+this.analyzer = Objects.requireNonNull(fieldType.getIndexAnalyzer());
 // register CharTermAttribute for later (implicit) reuse
-this.token = analyzer.tokenStream("content", reader).addAttribute(CharTermAttribute.class);
-Preconditions.checkNotNull(token);
+this.token = Objects.requireNonNull(analyzer.tokenStream("content", reader).addAttribute(CharTermAttribute.class));
 validateArguments();
 }
 
@@ -17,7 +17,6 @@
 package org.apache.solr.morphlines.solr;
 
 import com.codahale.metrics.MetricRegistry;
-import com.google.common.base.Joiner;
 import com.google.common.io.Files;
 import com.typesafe.config.Config;
 import org.apache.commons.io.FileUtils;
@@ -108,10 +107,9 @@ public class AbstractSolrMorphlineTestBase extends SolrTestCaseJ4 {
 }
 
 protected static void myInitCore(String baseDirName) throws Exception {
-Joiner joiner = Joiner.on(File.separator);
+String solrHome = RESOURCES_DIR + File.separator + baseDirName;
 initCore(
-"solrconfig.xml", "schema.xml",
-joiner.join(RESOURCES_DIR, baseDirName)
+"solrconfig.xml", "schema.xml", solrHome
 );
 }
 
@@ -24,9 +24,9 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Objects;
 
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-import com.google.common.base.Preconditions;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
 import org.apache.avro.generic.GenericData;
@@ -103,8 +103,7 @@ public class SolrMorphlineZkAvroTest extends AbstractSolrMorphlineZkTestBase {
 for (int i = 0; i < records.size(); i++) {
 // verify morphline spat out expected data
 Record actual = collector.getRecords().get(i);
-GenericData.Record expected = records.get(i);
-Preconditions.checkNotNull(expected);
+GenericData.Record expected = Objects.requireNonNull(records.get(i));
 assertTweetEquals(expected, actual, i);
 
 // verify Solr result set contains expected data
@@ -119,8 +118,8 @@ public class SolrMorphlineZkAvroTest extends AbstractSolrMorphlineZkTestBase {
 }
 
 private void assertTweetEquals(GenericData.Record expected, Record actual, int i) {
-Preconditions.checkNotNull(expected);
-Preconditions.checkNotNull(actual);
+Objects.requireNonNull(expected);
+Objects.requireNonNull(actual);
 // System.out.println("\n\nexpected: " + toString(expected));
 // System.out.println("actual: " + actual);
 String[] fieldNames = new String[] {
@@ -25,10 +25,10 @@ import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Predicate;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -330,7 +330,7 @@ public class DistributedQueue {
 updateLock.lockInterruptibly();
 try {
 for (String child : knownChildren) {
-if (acceptFilter.apply(child)) {
+if (acceptFilter.test(child)) {
 foundChildren.add(child);
 }
 }
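For context, a tiny sketch (illustrative names only) of the rename this hunk relies on: Guava's Predicate exposes apply(T), while java.util.function.Predicate exposes test(T) and plugs directly into Stream#filter:

    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Predicate;
    import java.util.stream.Collectors;

    public class PredicateTestDemo {
      public static void main(String[] args) {
        // Guava: acceptFilter.apply(child)  ->  Java 8: acceptFilter.test(child)
        Predicate<String> acceptFilter = child -> !child.startsWith("qnr-");

        List<String> knownChildren = Arrays.asList("qn-0000000001", "qnr-0000000002", "qn-0000000003");
        List<String> foundChildren = knownChildren.stream()
            .filter(acceptFilter)          // the same Predicate works as a Stream filter
            .collect(Collectors.toList());

        System.out.println(foundChildren); // [qn-0000000001, qn-0000000003]
      }
    }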
@@ -78,7 +78,7 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.google.common.base.Preconditions.checkNotNull;
+import static java.util.Objects.requireNonNull;
 import static org.apache.solr.common.params.CommonParams.AUTHC_PATH;
 import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH;
 import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
@@ -246,7 +246,7 @@ public class CoreContainer {
 public CoreContainer(NodeConfig config, Properties properties, CoresLocator locator, boolean asyncSolrCoreLoad) {
 this.loader = config.getSolrResourceLoader();
 this.solrHome = loader.getInstancePath().toString();
-this.cfg = checkNotNull(config);
+this.cfg = requireNonNull(config);
 this.coresLocator = locator;
 this.containerProperties = new Properties(properties);
 this.asyncSolrCoreLoad = asyncSolrCoreLoad;
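A brief sketch of the checkNotNull-to-requireNonNull swap used throughout this commit: both reject null, accept an optional message, and return their argument, so the check can stay inlined in an assignment; the class below is invented for illustration:

    import static java.util.Objects.requireNonNull;

    public class ConfigHolder {
      private final String name;
      private final Object config;

      public ConfigHolder(String name, Object config) {
        // Guava: this.name = Preconditions.checkNotNull(name, "name cannot be null");
        // JDK:   requireNonNull throws NullPointerException with the given message
        this.name = requireNonNull(name, "name cannot be null");
        this.config = requireNonNull(config);
      }

      public static void main(String[] args) {
        new ConfigHolder("core1", new Object());          // fine
        try {
          new ConfigHolder(null, new Object());           // throws NPE("name cannot be null")
        } catch (NullPointerException expected) {
          System.out.println(expected.getMessage());
        }
      }
    }

One small behavioral difference worth knowing: Guava's two-argument checkNotNull accepts any Object as the error message, whereas requireNonNull takes a String (or, since Java 8, a Supplier<String>).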
@@ -39,6 +39,7 @@ import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Properties;
 import java.util.Set;
@@ -152,7 +153,6 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.CommonParams.PATH;
 
@@ -816,9 +816,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
 
 assert ObjectReleaseTracker.track(searcherExecutor); // ensure that in unclean shutdown tests we still close this
 
-checkNotNull(coreDescriptor, "coreDescriptor cannot be null");
-
-this.coreDescriptor = coreDescriptor;
+this.coreDescriptor = Objects.requireNonNull(coreDescriptor, "coreDescriptor cannot be null");
 setName(name);
 MDCLoggingContext.setCore(this);
 
@@ -27,6 +27,7 @@ import java.net.URI;
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
 import java.util.Properties;
 
 import com.google.common.base.Preconditions;
@@ -68,8 +69,8 @@ public class BackupManager {
 protected final BackupRepository repository;
 
 public BackupManager(BackupRepository repository, ZkStateReader zkStateReader, String collectionName) {
-this.repository = Preconditions.checkNotNull(repository);
-this.zkStateReader = Preconditions.checkNotNull(zkStateReader);
+this.repository = Objects.requireNonNull(repository);
+this.zkStateReader = Objects.requireNonNull(zkStateReader);
 }
 
 /**
@@ -88,8 +89,8 @@ public class BackupManager {
 * @throws IOException In case of errors.
 */
 public Properties readBackupProperties(URI backupLoc, String backupId) throws IOException {
-Preconditions.checkNotNull(backupLoc);
-Preconditions.checkNotNull(backupId);
+Objects.requireNonNull(backupLoc);
+Objects.requireNonNull(backupId);
 
 // Backup location
 URI backupPath = repository.resolve(backupLoc, backupId);
@@ -129,7 +130,7 @@ public class BackupManager {
 * @throws IOException in case of errors.
 */
 public DocCollection readCollectionState(URI backupLoc, String backupId, String collectionName) throws IOException {
-Preconditions.checkNotNull(collectionName);
+Objects.requireNonNull(collectionName);
 
 URI zkStateDir = repository.resolve(backupLoc, backupId, ZK_STATE_DIR);
 try (IndexInput is = repository.openInput(zkStateDir, COLLECTION_PROPS_FILE, IOContext.DEFAULT)) {
@@ -20,6 +20,7 @@ package org.apache.solr.core.backup.repository;
 import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Objects;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -29,8 +30,6 @@ import org.apache.solr.core.SolrResourceLoader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-
 public class BackupRepositoryFactory {
 private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -67,9 +66,9 @@ public class BackupRepositoryFactory {
 }
 
 public BackupRepository newInstance(SolrResourceLoader loader, String name) {
-Preconditions.checkNotNull(loader);
-Preconditions.checkNotNull(name);
-PluginInfo repo = Preconditions.checkNotNull(backupRepoPluginByName.get(name),
+Objects.requireNonNull(loader);
+Objects.requireNonNull(name);
+PluginInfo repo = Objects.requireNonNull(backupRepoPluginByName.get(name),
 "Could not find a backup repository with name " + name);
 
 BackupRepository result = loader.newInstance(repo.className, BackupRepository.class);
@@ -21,7 +21,9 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.Objects;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -39,8 +41,6 @@ import org.apache.solr.core.HdfsDirectoryFactory;
 import org.apache.solr.store.hdfs.HdfsDirectory;
 import org.apache.solr.store.hdfs.HdfsDirectory.HdfsIndexInput;
 
-import com.google.common.base.Preconditions;
-
 public class HdfsBackupRepository implements BackupRepository {
 private static final String HDFS_UMASK_MODE_PARAM = "solr.hdfs.permissions.umask-mode";
 
@@ -67,7 +67,7 @@ public class HdfsBackupRepository implements BackupRepository {
 this.hdfsConfig.set(FsPermission.UMASK_LABEL, umaskVal);
 }
 
-String hdfsSolrHome = (String) Preconditions.checkNotNull(args.get(HdfsDirectoryFactory.HDFS_HOME),
+String hdfsSolrHome = (String) Objects.requireNonNull(args.get(HdfsDirectoryFactory.HDFS_HOME),
 "Please specify " + HdfsDirectoryFactory.HDFS_HOME + " property.");
 Path path = new Path(hdfsSolrHome);
 while (path != null) { // Compute the path of root file-system (without requiring an additional system property).
@@ -99,7 +99,7 @@ public class HdfsBackupRepository implements BackupRepository {
 
 @Override
 public URI createURI(String location) {
-Preconditions.checkNotNull(location);
+Objects.requireNonNull(location);
 
 URI result = null;
 try {
@@ -27,6 +27,7 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.SimpleFileVisitor;
 import java.nio.file.attribute.BasicFileAttributes;
+import java.util.Objects;
 
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
@@ -60,7 +61,7 @@ public class LocalFileSystemRepository implements BackupRepository {
 
 @Override
 public URI createURI(String location) {
-Preconditions.checkNotNull(location);
+Objects.requireNonNull(location);
 
 URI result = null;
 try {
@@ -26,10 +26,10 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
-import com.google.common.base.Preconditions;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexCommit;
@@ -169,7 +169,7 @@ public class SolrSnapshotMetaDataManager {
 * @throws IOException in case of I/O errors.
 */
 public synchronized void snapshot(String name, String indexDirPath, long gen) throws IOException {
-Preconditions.checkNotNull(name);
+Objects.requireNonNull(name);
 
 log.info("Creating the snapshot named {} for core {} associated with index commit with generation {} in directory {}"
 , name, solrCore.getName(), gen, indexDirPath);
@@ -205,7 +205,7 @@ public class SolrSnapshotMetaDataManager {
 */
 public synchronized Optional<SnapshotMetaData> release(String name) throws IOException {
 log.info("Deleting the snapshot named {} for core {}", name, solrCore.getName());
-SnapshotMetaData result = nameToDetailsMapping.remove(Preconditions.checkNotNull(name));
+SnapshotMetaData result = nameToDetailsMapping.remove(Objects.requireNonNull(name));
 if(result != null) {
 boolean success = false;
 try {
@@ -21,12 +21,11 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Locale;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import com.google.common.base.Preconditions;
-
 class OldBackupDirectory implements Comparable<OldBackupDirectory> {
 private static final Pattern dirNamePattern = Pattern.compile("^snapshot[.](.*)$");
 
@@ -35,8 +34,8 @@ class OldBackupDirectory implements Comparable<OldBackupDirectory> {
 private Optional<Date> timestamp = Optional.empty();
 
 public OldBackupDirectory(URI basePath, String dirName) {
-this.dirName = Preconditions.checkNotNull(dirName);
-this.basePath = Preconditions.checkNotNull(basePath);
+this.dirName = Objects.requireNonNull(dirName);
+this.basePath = Objects.requireNonNull(basePath);
 Matcher m = dirNamePattern.matcher(dirName);
 if (m.find()) {
 try {
@@ -27,10 +27,10 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Locale;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.function.Consumer;
 
-import com.google.common.base.Preconditions;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.common.SolrException;
@@ -84,8 +84,8 @@ public class SnapShooter {
 }
 
 private void initialize(BackupRepository backupRepo, SolrCore core, URI location, String snapshotName, String commitName) {
-this.solrCore = Preconditions.checkNotNull(core);
-this.backupRepo = Preconditions.checkNotNull(backupRepo);
+this.solrCore = Objects.requireNonNull(core);
+this.backupRepo = Objects.requireNonNull(backupRepo);
 this.baseSnapDirPath = location;
 this.snapshotName = snapshotName;
 if (snapshotName != null) {
@@ -111,7 +111,7 @@ public class SnapShooter {
 }
 
 public void validateDeleteSnapshot() {
-Preconditions.checkNotNull(this.snapshotName);
+Objects.requireNonNull(this.snapshotName);
 
 boolean dirFound = false;
 String[] paths;
@@ -55,7 +55,6 @@ import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.util.SolrPluginUtils;
 
-import com.google.common.base.Function;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.Multimaps;
 
@@ -72,26 +71,6 @@ public class ExtendedDismaxQParser extends QParser {
 */
 private static String IMPOSSIBLE_FIELD_NAME = "\uFFFC\uFFFC\uFFFC";
 
-/**
- * Helper function which returns the specified {@link FieldParams}' {@link FieldParams#getWordGrams()} value.
- */
-private static final Function<FieldParams, Integer> WORD_GRAM_EXTRACTOR = new Function<FieldParams, Integer>() {
-@Override
-public Integer apply(FieldParams input) {
-return input.getWordGrams();
-}
-};
-
-/**
- * Helper function which returns the specified {@link FieldParams}' {@link FieldParams#getSlop()} value.
- */
-private static final Function<FieldParams, Integer> PHRASE_SLOP_EXTRACTOR = new Function<FieldParams, Integer>() {
-@Override
-public Integer apply(FieldParams input) {
-return input.getSlop();
-}
-};
-
 /** shorten the class references for utilities */
 private static class U extends SolrPluginUtils {
 /* :NOOP */
@@ -246,7 +225,7 @@ public class ExtendedDismaxQParser extends QParser {
 }
 
 // create a map of {wordGram, [phraseField]}
-Multimap<Integer, FieldParams> phraseFieldsByWordGram = Multimaps.index(allPhraseFields, WORD_GRAM_EXTRACTOR);
+Multimap<Integer, FieldParams> phraseFieldsByWordGram = Multimaps.index(allPhraseFields, FieldParams::getWordGrams);
 
 // for each {wordGram, [phraseField]} entry, create and add shingled field queries to the main user query
 for (Map.Entry<Integer, Collection<FieldParams>> phraseFieldsByWordGramEntry : phraseFieldsByWordGram.asMap().entrySet()) {
@@ -254,7 +233,7 @@ public class ExtendedDismaxQParser extends QParser {
 // group the fields within this wordGram collection by their associated slop (it's possible that the same
 // field appears multiple times for the same wordGram count but with different slop values. In this case, we
 // should take the *sum* of those phrase queries, rather than the max across them).
-Multimap<Integer, FieldParams> phraseFieldsBySlop = Multimaps.index(phraseFieldsByWordGramEntry.getValue(), PHRASE_SLOP_EXTRACTOR);
+Multimap<Integer, FieldParams> phraseFieldsBySlop = Multimaps.index(phraseFieldsByWordGramEntry.getValue(), FieldParams::getSlop);
 for (Map.Entry<Integer, Collection<FieldParams>> phraseFieldsBySlopEntry : phraseFieldsBySlop.asMap().entrySet()) {
 addShingledPhraseQueries(query, normalClauses, phraseFieldsBySlopEntry.getValue(),
 phraseFieldsByWordGramEntry.getKey(), config.tiebreaker, phraseFieldsBySlopEntry.getKey());
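Worth noting about the Multimaps.index change above: Guava's Function is a single-method interface, so a Java 8 method reference such as FieldParams::getWordGrams satisfies it directly and the anonymous extractor classes can be dropped. A dependency-free sketch of the same grouping using only the JDK (types and data invented):

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class GroupByExtractor {
      // Stand-in for FieldParams: just a field name with a wordGrams setting.
      static class FieldParams {
        final String field;
        final int wordGrams;
        FieldParams(String field, int wordGrams) { this.field = field; this.wordGrams = wordGrams; }
        int getWordGrams() { return wordGrams; }
        @Override public String toString() { return field; }
      }

      public static void main(String[] args) {
        List<FieldParams> allPhraseFields = Arrays.asList(
            new FieldParams("title", 2), new FieldParams("body", 2), new FieldParams("author", 3));

        // The method reference replaces an anonymous Function<FieldParams, Integer>;
        // Guava's Multimaps.index accepts the same reference because its Function is a SAM interface.
        Map<Integer, List<FieldParams>> byWordGram = allPhraseFields.stream()
            .collect(Collectors.groupingBy(FieldParams::getWordGrams));

        System.out.println(byWordGram); // {2=[title, body], 3=[author]}
      }
    }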
@@ -32,6 +32,7 @@ import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 
 import java.util.concurrent.TimeUnit;
@@ -110,8 +111,6 @@ import org.apache.solr.update.SolrIndexConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.base.Objects;
 import com.google.common.collect.Iterables;
 
 /**
@@ -525,12 +524,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 * Returns a collection of all field names the index reader knows about.
 */
 public Iterable<String> getFieldNames() {
-return Iterables.transform(fieldInfos, new Function<FieldInfo,String>() {
-@Override
-public String apply(FieldInfo fieldInfo) {
-return fieldInfo.name;
-}
-});
+return Iterables.transform(fieldInfos, fieldInfo -> fieldInfo.name);
 }
 
 public SolrCache<Query,DocSet> getFilterCache() {
@@ -2674,8 +2668,8 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 }
 
 private boolean equalsTo(FilterImpl other) {
-return Objects.equal(this.topFilter, other.topFilter) &&
-Objects.equal(this.weights, other.weights);
+return Objects.equals(this.topFilter, other.topFilter) &&
+Objects.equals(this.weights, other.weights);
 }
 
 @Override
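A short sketch of the null-safe equality swap in FilterImpl: com.google.common.base.Objects#equal and #hashCode correspond to java.util.Objects#equals and #hash; the value class here is invented:

    import java.util.Objects;

    public class FilterKey {
      private final String topFilter;   // may be null
      private final String weights;     // may be null

      FilterKey(String topFilter, String weights) {
        this.topFilter = topFilter;
        this.weights = weights;
      }

      @Override
      public boolean equals(Object o) {
        if (!(o instanceof FilterKey)) return false;
        FilterKey other = (FilterKey) o;
        // Guava: Objects.equal(a, b) — JDK: Objects.equals(a, b), both tolerate nulls
        return Objects.equals(this.topFilter, other.topFilter)
            && Objects.equals(this.weights, other.weights);
      }

      @Override
      public int hashCode() {
        // Guava: Objects.hashCode(Object...) — JDK: Objects.hash(Object...)
        return Objects.hash(topFilter, weights);
      }

      public static void main(String[] args) {
        System.out.println(new FilterKey(null, "w").equals(new FilterKey(null, "w"))); // true
      }
    }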
@@ -16,13 +16,11 @@
 */
 package org.apache.solr.update;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.solr.request.SolrQueryRequest;
 
 import java.util.List;
+import java.util.stream.Collectors;
 
 /**
 * A merge indexes command encapsulated in an object.
@@ -46,13 +44,7 @@ public class MergeIndexesCommand extends UpdateCommand {
 @Override
 public String toString() {
 StringBuilder sb = new StringBuilder(super.toString());
-Joiner joiner = Joiner.on(",");
-Iterable<String> directories = Iterables.transform(readers, new Function<DirectoryReader, String>() {
-public String apply(DirectoryReader reader) {
-return reader.directory().toString();
-}
-});
-joiner.skipNulls().join(sb, directories);
+sb.append(readers.stream().map(reader-> reader.directory().toString()).collect(Collectors.joining(",")));
 sb.append('}');
 return sb.toString();
 }
@@ -16,12 +16,12 @@
 */
 package org.apache.solr.client.solrj.embedded;
 
-import com.google.common.base.Charsets;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.junit.Test;
 
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -44,7 +44,7 @@ public class TestJettySolrRunner extends SolrTestCaseJ4 {
 = "<solr><str name=\"configSetBaseDir\">CONFIGSETS</str><str name=\"coreRootDirectory\">COREROOT</str></solr>"
 .replace("CONFIGSETS", configsets.toString())
 .replace("COREROOT", coresDir.toString());
-Files.write(solrHome.resolve("solr.xml"), solrxml.getBytes(Charsets.UTF_8));
+Files.write(solrHome.resolve("solr.xml"), solrxml.getBytes(StandardCharsets.UTF_8));
 
 JettyConfig jettyConfig = buildJettyConfig("/solr");
 
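For reference, a minimal sketch of the charset swap: java.nio.charset.StandardCharsets (available since Java 7) supplies the same UTF_8 constant that Guava's and commons-codec's Charsets classes did, so no third-party import is needed; the temp path below is illustrative:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class WriteUtf8 {
      public static void main(String[] args) throws IOException {
        Path solrXml = Files.createTempDirectory("solrhome").resolve("solr.xml");

        // Guava: "<solr/>".getBytes(Charsets.UTF_8)
        // JDK:   "<solr/>".getBytes(StandardCharsets.UTF_8) — no extra dependency
        Files.write(solrXml, "<solr/>".getBytes(StandardCharsets.UTF_8));

        System.out.println(new String(Files.readAllBytes(solrXml), StandardCharsets.UTF_8));
      }
    }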
@@ -22,6 +22,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
+import java.util.function.Predicate;
 
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -31,9 +32,6 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import static com.google.common.base.Predicates.alwaysFalse;
-import static com.google.common.base.Predicates.alwaysTrue;
-
 public class DistributedQueueTest extends SolrTestCaseJ4 {
 
 private static final Charset UTF8 = Charset.forName("UTF-8");
@@ -151,17 +149,20 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
 dq.offer(data);
 dq.offer(data);
 
+Predicate<String> alwaysTrue = s -> true;
+Predicate<String> alwaysFalse = s -> false;
+
 // Should be able to get 0, 1, 2, or 3 instantly
 for (int i = 0; i <= 3; ++i) {
-assertEquals(i, dq.peekElements(i, 0, alwaysTrue()).size());
+assertEquals(i, dq.peekElements(i, 0, alwaysTrue).size());
 }
 
 // Asking for more should return only 3.
-assertEquals(3, dq.peekElements(4, 0, alwaysTrue()).size());
+assertEquals(3, dq.peekElements(4, 0, alwaysTrue).size());
 
 // If we filter everything out, we should block for the full time.
 long start = System.nanoTime();
-assertEquals(0, dq.peekElements(4, 1000, alwaysFalse()).size());
+assertEquals(0, dq.peekElements(4, 1000, alwaysFalse).size());
 assertTrue(System.nanoTime() - start >= TimeUnit.MILLISECONDS.toNanos(500));
 
 // If someone adds a new matching element while we're waiting, we should return immediately.
@@ -24,9 +24,9 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Properties;
 
-import com.google.common.base.Preconditions;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
 
@@ -193,10 +193,8 @@ public class KerberosTestServices {
 
 public Builder withJaasConfiguration(String clientPrincipal, File clientKeytab,
 String serverPrincipal, File serverKeytab) {
-Preconditions.checkNotNull(clientPrincipal);
-Preconditions.checkNotNull(clientKeytab);
-this.clientPrincipal = clientPrincipal;
-this.clientKeytab = clientKeytab;
+this.clientPrincipal = Objects.requireNonNull(clientPrincipal);
+this.clientKeytab = Objects.requireNonNull(clientKeytab);
 this.serverPrincipal = serverPrincipal;
 this.serverKeytab = serverKeytab;
 this.appName = null;
@@ -204,10 +202,8 @@ public class KerberosTestServices {
 }
 
 public Builder withJaasConfiguration(String principal, File keytab, String appName) {
-Preconditions.checkNotNull(principal);
-Preconditions.checkNotNull(keytab);
-this.clientPrincipal = principal;
-this.clientKeytab = keytab;
+this.clientPrincipal = Objects.requireNonNull(principal);
+this.clientKeytab = Objects.requireNonNull(keytab);
 this.serverPrincipal = null;
 this.serverKeytab = null;
 this.appName = appName;
@@ -18,6 +18,7 @@ package org.apache.solr.core;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -27,7 +28,6 @@ import java.util.Map;
 import java.util.regex.Pattern;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.SolrTestCaseJ4;
@@ -537,13 +537,13 @@ public class TestLazyCores extends SolrTestCaseJ4 {
 // Write the file for core discovery
 FileUtils.writeStringToFile(new File(coreRoot, "core.properties"), "name=" + coreName +
 System.getProperty("line.separator") + "transient=true" +
-System.getProperty("line.separator") + "loadOnStartup=true", Charsets.UTF_8.toString());
+System.getProperty("line.separator") + "loadOnStartup=true", StandardCharsets.UTF_8);
 
-FileUtils.writeStringToFile(new File(subHome, "solrconfig.snippet.randomindexconfig.xml"), rand_snip, Charsets.UTF_8.toString());
+FileUtils.writeStringToFile(new File(subHome, "solrconfig.snippet.randomindexconfig.xml"), rand_snip, StandardCharsets.UTF_8);
 
-FileUtils.writeStringToFile(new File(subHome, "solrconfig.xml"), config, Charsets.UTF_8.toString());
+FileUtils.writeStringToFile(new File(subHome, "solrconfig.xml"), config, StandardCharsets.UTF_8);
 
-FileUtils.writeStringToFile(new File(subHome, "schema.xml"), schema, Charsets.UTF_8.toString());
+FileUtils.writeStringToFile(new File(subHome, "schema.xml"), schema, StandardCharsets.UTF_8);
 }
 
 // Write out the cores' config files, both bad schema files, bad config files as well as some good cores.
@@ -565,11 +565,11 @@ public class TestLazyCores extends SolrTestCaseJ4 {
 // Collect the files that we'll write to the config directories.
 String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
 String min_schema = FileUtils.readFileToString(new File(top, "schema-tiny.xml"),
-Charsets.UTF_8.toString());
+StandardCharsets.UTF_8);
 String min_config = FileUtils.readFileToString(new File(top, "solrconfig-minimal.xml"),
-Charsets.UTF_8.toString());
+StandardCharsets.UTF_8);
 String rand_snip = FileUtils.readFileToString(new File(top, "solrconfig.snippet.randomindexconfig.xml"),
-Charsets.UTF_8.toString());
+StandardCharsets.UTF_8);
 
 // Now purposely mess up the config files, introducing stupid syntax errors.
 String bad_config = min_config.replace("<requestHandler", "<reqsthalr");
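A hedged sketch of the commons-io usage above, assuming commons-io 2.3+ on the classpath: FileUtils has overloads taking a java.nio.charset.Charset directly, so Charsets.UTF_8.toString() can become StandardCharsets.UTF_8 without passing a charset name; the file locations are invented:

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;

    import org.apache.commons.io.FileUtils;

    public class FileUtilsCharsetDemo {
      public static void main(String[] args) throws IOException {
        File props = new File(Files.createTempDirectory("core").toFile(), "core.properties");

        // Old: FileUtils.writeStringToFile(props, "name=collection1", Charsets.UTF_8.toString());
        // New: pass the Charset constant itself.
        FileUtils.writeStringToFile(props, "name=collection1", StandardCharsets.UTF_8);

        System.out.println(FileUtils.readFileToString(props, StandardCharsets.UTF_8));
      }
    }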
@@ -17,12 +17,12 @@
 package org.apache.solr.handler.admin;
 
 import java.io.File;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Map;
 
 import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -250,7 +250,7 @@ public class CoreAdminHandlerTest extends SolrTestCaseJ4 {
 solrHomeDirectory.mkdirs();
 copySolrHomeToTemp(solrHomeDirectory, "corex");
 File corex = new File(solrHomeDirectory, "corex");
-FileUtils.write(new File(corex, "core.properties"), "", Charsets.UTF_8.toString());
+FileUtils.write(new File(corex, "core.properties"), "", StandardCharsets.UTF_8);
 JettySolrRunner runner = new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr"));
 runner.start();
 
@@ -19,8 +19,8 @@ package org.apache.solr.schema;
 import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 
 import org.apache.lucene.search.similarities.Similarity;
@@ -90,11 +90,11 @@ public class ChangedSchemaMergeTest extends SolrTestCaseJ4 {
 copyMinConf(changed, "name=changed");
 // Overlay with my local schema
 schemaFile = new File(new File(changed, "conf"), "schema.xml");
-FileUtils.writeStringToFile(schemaFile, withWhich, Charsets.UTF_8.toString());
+FileUtils.writeStringToFile(schemaFile, withWhich, StandardCharsets.UTF_8);
 
 String discoveryXml = "<solr></solr>";
 File solrXml = new File(solrHomeDirectory, "solr.xml");
-FileUtils.write(solrXml, discoveryXml, Charsets.UTF_8.toString());
+FileUtils.write(solrXml, discoveryXml, StandardCharsets.UTF_8);
 
 final CoreContainer cores = new CoreContainer(solrHomeDirectory.getAbsolutePath());
 cores.load();
@@ -133,7 +133,7 @@ public class ChangedSchemaMergeTest extends SolrTestCaseJ4 {
 changed.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
 
 // write the new schema out and make it current
-FileUtils.writeStringToFile(schemaFile, withoutWhich, Charsets.UTF_8.toString());
+FileUtils.writeStringToFile(schemaFile, withoutWhich, StandardCharsets.UTF_8);
 
 IndexSchema iSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", changed.getSolrConfig());
 changed.setLatestSchema(iSchema);
@@ -16,7 +16,6 @@
 */
 package org.apache.solr.schema;
 
-import com.google.common.base.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.SolrTestCaseJ4;
@@ -34,6 +33,7 @@ import java.io.File;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.List;
 import java.util.Properties;
@@ -64,7 +64,7 @@ public class TestBinaryField extends SolrJettyTestBase {
 FileUtils.copyFile(new File(src_dir, "solrconfig.snippet.randomindexconfig.xml"),
 new File(confDir, "solrconfig.snippet.randomindexconfig.xml"));
 
-try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), Charsets.UTF_8)) {
+try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), StandardCharsets.UTF_8)) {
 Properties coreProps = new Properties();
 coreProps.put("name", "collection1");
 coreProps.store(w, "");
@@ -18,12 +18,12 @@ package org.apache.solr.security;
 
 import java.lang.invoke.MethodHandles;
 
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.function.Predicate;
 
-import org.apache.commons.io.Charsets;
 import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.util.EntityUtils;
@@ -49,7 +49,7 @@ public class TestAuthorizationFramework extends AbstractFullDistribZkTestBase {
 try (ZkStateReader zkStateReader = new ZkStateReader(zkServer.getZkAddress(),
 TIMEOUT, TIMEOUT)) {
 zkStateReader.getZkClient().create(ZkStateReader.SOLR_SECURITY_CONF_PATH,
-"{\"authorization\":{\"class\":\"org.apache.solr.security.MockAuthorizationPlugin\"}}".getBytes(Charsets.UTF_8),
+"{\"authorization\":{\"class\":\"org.apache.solr.security.MockAuthorizationPlugin\"}}".getBytes(StandardCharsets.UTF_8),
 CreateMode.PERSISTENT, true);
 }
 }
@@ -16,7 +16,6 @@
 */
 package org.apache.solr.common.cloud;
 
-import com.google.common.base.Charsets;
 import com.google.common.base.Throwables;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.ZkTestServer;
@@ -30,6 +29,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.security.NoSuchAlgorithmException;
@@ -69,7 +69,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
 ZkConfigManager configManager = new ZkConfigManager(zkClient);
 assertEquals(0, configManager.listConfigs().size());
 
-byte[] testdata = "test data".getBytes(Charsets.UTF_8);
+byte[] testdata = "test data".getBytes(StandardCharsets.UTF_8);
 
 Path tempConfig = createTempDir("config");
 Files.createFile(tempConfig.resolve("file1"));
@@ -102,7 +102,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
 assertArrayEquals(testdata, checkdata);
 
 // uploading to the same config overwrites
-byte[] overwritten = "new test data".getBytes(Charsets.UTF_8);
+byte[] overwritten = "new test data".getBytes(StandardCharsets.UTF_8);
 Files.write(tempConfig.resolve("file1"), overwritten);
 configManager.uploadConfigDir(tempConfig, "testconfig");
 
@@ -147,7 +147,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
 @Override
 protected Collection<ZkCredentials> createCredentials() {
 List<ZkCredentials> credentials = new ArrayList<>();
-credentials.add(new ZkCredentials("digest", (readOnlyUsername + ":" + readOnlyPassword).getBytes(Charsets.UTF_8)));
+credentials.add(new ZkCredentials("digest", (readOnlyUsername + ":" + readOnlyPassword).getBytes(StandardCharsets.UTF_8)));
 return credentials;
 }
 };
@@ -156,7 +156,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
 @Override
 protected Collection<ZkCredentials> createCredentials() {
 List<ZkCredentials> credentials = new ArrayList<>();
-credentials.add(new ZkCredentials("digest", (writeableUsername + ":" + writeablePassword).getBytes(Charsets.UTF_8)));
+credentials.add(new ZkCredentials("digest", (writeableUsername + ":" + writeablePassword).getBytes(StandardCharsets.UTF_8)));
 return credentials;
 }
 };
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -34,7 +35,6 @@ import java.util.Properties;
 import java.util.Random;
 import java.util.Set;
 
-import com.google.common.base.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.client.solrj.SolrClient;
@@ -434,7 +434,7 @@ public class SolrTestCaseHS extends SolrTestCaseJ4 {
 copyConfFile(baseDir, collection, schemaFile);
 
 File collDir = new File(baseDir, collection);
-try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), Charsets.UTF_8)) {
+try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), StandardCharsets.UTF_8)) {
 Properties coreProps = new Properties();
 coreProps.put("name", "collection1");
 coreProps.put("config", solrconfigFile);
@@ -55,7 +55,6 @@ import java.util.Properties;
 import com.carrotsearch.randomizedtesting.RandomizedContext;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.http.client.HttpClient;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -127,7 +126,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;
 
-import static com.google.common.base.Preconditions.checkNotNull;
+import static java.util.Objects.requireNonNull;
 
 /**
 * A junit4 Solr test harness that extends LuceneTestCaseJ4. To change which core is used when loading the schema and solrconfig.xml, simply
@@ -639,7 +638,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
 }
 
 public static CoreContainer createCoreContainer(Path solrHome, String solrXML) {
-testSolrHome = checkNotNull(solrHome);
+testSolrHome = requireNonNull(solrHome);
 h = new TestHarness(solrHome, solrXML);
 lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
 return h.getCoreContainer();
@@ -661,7 +660,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
 }
 
 public static CoreContainer createDefaultCoreContainer(Path solrHome) {
-testSolrHome = checkNotNull(solrHome);
+testSolrHome = requireNonNull(solrHome);
 h = new TestHarness("collection1", initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml");
 lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
 return h.getCoreContainer();
@@ -1870,7 +1869,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
 }
 Files.createFile(dstRoot.toPath().resolve("core.properties"));
 if (propertiesContent != null) {
-FileUtils.writeStringToFile(new File(dstRoot, "core.properties"), propertiesContent, Charsets.UTF_8.toString());
+FileUtils.writeStringToFile(new File(dstRoot, "core.properties"), propertiesContent, StandardCharsets.UTF_8);
 }
 String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
 FileUtils.copyFile(new File(top, "schema-tiny.xml"), new File(subHome, "schema.xml"));
@@ -20,6 +20,7 @@ import javax.servlet.Filter;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
@@ -37,7 +38,6 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import com.google.common.base.Charsets;
 import org.apache.solr.client.solrj.embedded.JettyConfig;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.embedded.SSLConfig;
@@ -200,7 +200,7 @@ public class MiniSolrCloudCluster {
 try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) {
 zkClient.makePath("/solr/solr.xml", solrXml.getBytes(Charset.defaultCharset()), true);
 if (jettyConfig.sslConfig != null && jettyConfig.sslConfig.isSSLMode()) {
-zkClient.makePath("/solr" + ZkStateReader.CLUSTER_PROPS, "{'urlScheme':'https'}".getBytes(Charsets.UTF_8), true);
+zkClient.makePath("/solr" + ZkStateReader.CLUSTER_PROPS, "{'urlScheme':'https'}".getBytes(StandardCharsets.UTF_8), true);
 }
 }
 
@@ -16,7 +16,6 @@
 */
 package org.apache.solr.cloud;
 
-import com.google.common.collect.Ordering;
 import com.google.common.util.concurrent.AtomicLongMap;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.util.TimeOut;
@@ -150,18 +149,18 @@ public class ZkTestServer {
 }
 
 private String reportLimitViolations() {
-Object[] maxKeys = maxCounters.keySet().toArray();
-Arrays.sort(maxKeys, new Comparator<Object>() {
-private final Comparator<Long> valComp = Ordering.natural().reverse();
+String[] maxKeys = maxCounters.keySet().toArray(new String[maxCounters.size()]);
+Arrays.sort(maxKeys, new Comparator<String>() {
+private final Comparator<Long> valComp = Comparator.<Long>naturalOrder().reversed();
 @Override
-public int compare(Object o1, Object o2) {
+public int compare(String o1, String o2) {
 return valComp.compare(maxCounters.get(o1), maxCounters.get(o2));
 }
 });
 
 StringBuilder sb = new StringBuilder();
 boolean first = true;
-for (Object key : maxKeys) {
+for (String key : maxKeys) {
 long value = maxCounters.get(key);
 if (value <= limit) continue;
 if (first) {
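Finally, a sketch of the Ordering-to-Comparator move in reportLimitViolations: Guava's Ordering.natural().reverse() corresponds to Comparator.naturalOrder().reversed(), and keys can be ranked by their counter values with Comparator.comparing; the counter data below is invented:

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.Map;

    public class ReverseOrderDemo {
      public static void main(String[] args) {
        Map<String, Long> maxCounters = new HashMap<>();
        maxCounters.put("/live_nodes", 7L);
        maxCounters.put("/collections", 42L);
        maxCounters.put("/overseer", 13L);

        String[] maxKeys = maxCounters.keySet().toArray(new String[0]);

        // Guava: Ordering.natural().reverse()  ->  JDK: Comparator.<Long>naturalOrder().reversed()
        // Sort keys by their counter value, highest first.
        Arrays.sort(maxKeys, Comparator.comparing(maxCounters::get, Comparator.<Long>naturalOrder().reversed()));

        System.out.println(Arrays.toString(maxKeys)); // [/collections, /overseer, /live_nodes]
      }
    }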