Integrate forbiddenAPI checks into Maven build.
This commit integrates the forbiddenAPI checks, which check Java byte code against a list of "forbidden" API signatures. The commit also contains fixes to the current source code that didn't pass the default API checks. See https://code.google.com/p/forbidden-apis/ for details. Closes #3059
This commit is contained in:
parent c4db582f26
commit 31f0aca65d
27	core-signatures.txt	Normal file
@@ -0,0 +1,27 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# see Solr's DefaultSolrThreadFactory
+# see Lucene's NamedThreadFactory
+
+@defaultMessage spawns threads with vague names; use a custom thread factory and name threads so that you can tell (by its name) which executor it is associated with
+
+java.util.concurrent.Executors#newFixedThreadPool(int)
+java.util.concurrent.Executors#newSingleThreadExecutor()
+java.util.concurrent.Executors#newCachedThreadPool()
+java.util.concurrent.Executors#newSingleThreadScheduledExecutor()
+java.util.concurrent.Executors#newScheduledThreadPool(int)
+java.util.concurrent.Executors#defaultThreadFactory()
+java.util.concurrent.Executors#privilegedThreadPool()
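The thread-naming rule above works because forbidden-apis matches exact signatures: the one-argument Executors.newFixedThreadPool(int) is banned, while the overload that takes an explicit ThreadFactory is not. A minimal sketch of a naming factory in the spirit of Lucene's NamedThreadFactory (the class below is illustrative, not the code this commit relies on):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ThreadFactory;
    import java.util.concurrent.atomic.AtomicInteger;

    // Gives every thread a name like "elasticsearch[search]-3" so a thread
    // dump reveals which executor each thread belongs to.
    class NamedThreadFactory implements ThreadFactory {
        private final AtomicInteger counter = new AtomicInteger();
        private final String prefix;

        NamedThreadFactory(String prefix) {
            this.prefix = prefix;
        }

        @Override
        public Thread newThread(Runnable r) {
            return new Thread(r, prefix + "-" + counter.incrementAndGet());
        }
    }

    public class NamedThreadFactoryDemo {
        public static void main(String[] args) {
            // Forbidden: Executors.newFixedThreadPool(4)
            // Allowed: the overload taking an explicit ThreadFactory.
            ExecutorService pool = Executors.newFixedThreadPool(4, new NamedThreadFactory("elasticsearch[search]"));
            pool.submit(new Runnable() {
                public void run() {
                    System.out.println(Thread.currentThread().getName());
                }
            });
            pool.shutdown();
        }
    }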
62	pom.xml
@@ -792,6 +792,68 @@
                 </postremoveScriptlet>
             </configuration>
         </plugin>
+        <plugin>
+            <groupId>de.thetaphi</groupId>
+            <artifactId>forbiddenapis</artifactId>
+            <version>1.3</version>
+
+            <executions>
+                <execution>
+                    <id>check-forbidden-apis</id>
+                    <configuration>
+                        <targetVersion>1.6</targetVersion>
+                        <!-- disallow undocumented classes like sun.misc.Unsafe: -->
+                        <internalRuntimeForbidden>true</internalRuntimeForbidden>
+                        <!-- if the used Java version is too new, don't fail, just do nothing: -->
+                        <failOnUnsupportedJava>false</failOnUnsupportedJava>
+                        <excludes>
+                            <exclude>jsr166e/**</exclude>
+                            <exclude>jsr166y/**</exclude>
+                            <!-- these excludes are due to sysout/syserr usage - we should check those at some point
+                            <exclude>org/elasticsearch/common/logging/log4j/ConsoleAppender*</exclude>
+                            <exclude>org/elasticsearch/common/logging/support/LoggerMessageFormat.class</exclude>
+                            <exclude>org/elasticsearch/common/Base64.class</exclude>
+                            <exclude>org/elasticsearch/common/compress/bzip2/CBZip2InputStream.class</exclude>
+                            <exclude>org/elasticsearch/plugins/PluginManager.class</exclude>
+                            <exclude>org/elasticsearch/bootstrap/Bootstrap.class</exclude>
+                            -->
+                        </excludes>
+                        <bundledSignatures>
+                            <!-- This will automatically choose the right signatures based on 'maven.compiler.target': -->
+                            <bundledSignature>jdk-unsafe</bundledSignature>
+                            <bundledSignature>jdk-deprecated</bundledSignature>
+                            <!-- <bundledSignature>jdk-system-out</bundledSignature> see excludes -->
+                        </bundledSignatures>
+                        <signaturesFiles>
+                            <signaturesFile>core-signatures.txt</signaturesFile>
+                        </signaturesFiles>
+                    </configuration>
+                    <phase>test</phase>
+                    <goals>
+                        <goal>check</goal>
+                    </goals>
+                </execution>
+                <execution>
+                    <id>check-forbidden-test-apis</id>
+                    <configuration>
+                        <targetVersion>1.6</targetVersion>
+                        <!-- disallow undocumented classes like sun.misc.Unsafe: -->
+                        <internalRuntimeForbidden>true</internalRuntimeForbidden>
+                        <!-- if the used Java version is too new, don't fail, just do nothing: -->
+                        <failOnUnsupportedJava>false</failOnUnsupportedJava>
+                        <bundledSignatures>
+                            <!-- This will automatically choose the right signatures based on 'maven.compiler.target': -->
+                            <bundledSignature>jdk-unsafe</bundledSignature>
+                            <bundledSignature>jdk-deprecated</bundledSignature>
+                        </bundledSignatures>
+                    </configuration>
+                    <phase>test</phase>
+                    <goals>
+                        <goal>testCheck</goal>
+                    </goals>
+                </execution>
+            </executions>
+        </plugin>
     </plugins>
     <pluginManagement>
         <plugins>
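For context, the call sites the bundled jdk-unsafe signatures reject are ones that silently pick up JVM defaults, which is exactly what the source fixes below remove. A hedged sketch of the before/after shape (not code from this commit):

    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.nio.charset.Charset;

    public class CharsetExample {
        // Rejected by the check: the result depends on the platform default
        // charset, so behavior differs from machine to machine.
        static Reader unsafe(InputStream in) {
            return new InputStreamReader(in);
        }

        // Passes the check: the charset is explicit.
        static Reader safe(InputStream in) {
            return new InputStreamReader(in, Charset.forName("UTF-8"));
        }
    }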
@@ -19,6 +19,8 @@

 package org.elasticsearch.action.support.broadcast;

+import java.util.Locale;
+
 import org.elasticsearch.ElasticSearchIllegalArgumentException;

 /**
@@ -69,6 +71,6 @@ public enum BroadcastOperationThreading {
         if (value == null) {
             return defaultValue;
         }
-        return BroadcastOperationThreading.valueOf(value.toUpperCase());
+        return BroadcastOperationThreading.valueOf(value.toUpperCase(Locale.ROOT));
     }
 }
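The Locale.ROOT changes in this and the following files all guard against the same pitfall: toUpperCase()/toLowerCase() without an argument use the JVM's default locale, which breaks constant lookups like the valueOf above on, for example, Turkish systems. A small illustration of the failure mode (the enum here is invented for the example):

    import java.util.Locale;

    public class TurkishLocaleExample {
        enum Threading { SINGLE_THREAD }

        public static void main(String[] args) {
            Locale.setDefault(new Locale("tr", "TR"));
            String value = "single_thread";
            // Turkish 'i' uppercases to dotted capital I (U+0130):
            System.out.println(value.toUpperCase());            // SİNGLE_THREAD
            System.out.println(value.toUpperCase(Locale.ROOT)); // SINGLE_THREAD
            // Locale-sensitive uppercasing would make this valueOf throw:
            Threading t = Threading.valueOf(value.toUpperCase(Locale.ROOT));
            System.out.println(t);
        }
    }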
@@ -108,7 +108,6 @@ public abstract class TransportIndicesReplicationOperationAction<Request extends

         @Override
         public void onFailure(Throwable e) {
-            e.printStackTrace();
             int index = indexCounter.getAndIncrement();
             if (accumulateExceptions()) {
                 indexResponses.set(index, e);
@@ -38,6 +38,7 @@ import org.elasticsearch.node.internal.InternalSettingsPerparer;

 import java.io.File;
 import java.io.RandomAccessFile;
+import java.util.Locale;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;

@@ -186,7 +187,7 @@ public class Bootstrap {
         }

         // warn if running using the client VM
-        if (JvmInfo.jvmInfo().vmName().toLowerCase().contains("client")) {
+        if (JvmInfo.jvmInfo().vmName().toLowerCase(Locale.ROOT).contains("client")) {
             ESLogger logger = Loggers.getLogger(Bootstrap.class);
             logger.warn("jvm uses the client vm, make sure to run `java` with the server vm for best performance by adding `-server` to the command line");
         }
@@ -28,6 +28,7 @@ import org.elasticsearch.rest.RestStatus;

 import java.io.IOException;
 import java.io.Serializable;
+import java.util.Locale;

 /**
  *
@@ -108,7 +109,7 @@ public class ClusterBlock implements Serializable, Streamable, ToXContent {
         }
         builder.startArray("levels");
         for (ClusterBlockLevel level : levels) {
-            builder.value(level.name().toLowerCase());
+            builder.value(level.name().toLowerCase(Locale.ROOT));
         }
         builder.endArray();
         builder.endObject();
@@ -23,6 +23,8 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import com.google.common.io.Closeables;

+import org.apache.lucene.util.UnicodeUtil;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterService;
@@ -36,6 +38,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.Unicode;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.compress.CompressedString;
 import org.elasticsearch.common.inject.Inject;
@@ -61,8 +64,11 @@ import org.elasticsearch.river.RiverIndexName;
 import org.elasticsearch.threadpool.ThreadPool;

 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 import java.util.*;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -433,7 +439,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                 int lastDotIndex = mappingFile.getName().lastIndexOf('.');
                 String mappingType = lastDotIndex != -1 ? mappingFile.getName().substring(0, lastDotIndex) : mappingFile.getName();
                 try {
-                    String mappingSource = Streams.copyToString(new FileReader(mappingFile));
+                    String mappingSource = Streams.copyToString(new InputStreamReader(new FileInputStream(mappingFile), Streams.UTF8));
                     if (mappings.containsKey(mappingType)) {
                         XContentHelper.mergeDefaults(mappings.get(mappingType), parseMapping(mappingSource));
                     } else {
@@ -504,7 +510,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
         if (!request.index.equals(riverIndexName) && !request.index.equals(PercolatorService.INDEX_NAME) && request.index.charAt(0) == '_') {
             throw new InvalidIndexNameException(new Index(request.index), request.index, "must not start with '_'");
         }
-        if (!request.index.toLowerCase().equals(request.index)) {
+        if (!request.index.toLowerCase(Locale.ROOT).equals(request.index)) {
             throw new InvalidIndexNameException(new Index(request.index), request.index, "must be lowercase");
         }
         if (!Strings.validFileName(request.index)) {
@@ -35,6 +35,7 @@ import org.elasticsearch.indices.IndexTemplateAlreadyExistsException;
 import org.elasticsearch.indices.IndexTemplateMissingException;
 import org.elasticsearch.indices.InvalidIndexTemplateException;

+import java.util.Locale;
 import java.util.Map;

 /**
@@ -149,7 +150,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
         if (request.name.startsWith("_")) {
             throw new InvalidIndexTemplateException(request.name, "name must not start with '_'");
         }
-        if (!request.name.toLowerCase().equals(request.name)) {
+        if (!request.name.toLowerCase(Locale.ROOT).equals(request.name)) {
             throw new InvalidIndexTemplateException(request.name, "name must be lower cased");
         }
         if (request.template.contains(" ")) {
@@ -35,6 +35,7 @@ import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.settings.IndexDynamicSettings;

+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;

@@ -188,7 +189,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
         }

         if (!removedSettings.isEmpty() && !openIndices.isEmpty()) {
-            listener.onFailure(new ElasticSearchIllegalArgumentException(String.format(
+            listener.onFailure(new ElasticSearchIllegalArgumentException(String.format(Locale.ROOT,
                     "Can't update non dynamic settings[%s] for open indices[%s]",
                     removedSettings,
                     openIndices
@@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;

 import java.util.List;
+import java.util.Locale;

 /**
  * This {@link AllocationDecider} controls re-balancing operations based on the
@@ -81,7 +82,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider {
             logger.warn("[cluster.routing.allocation.allow_rebalance] has a wrong value {}, defaulting to 'indices_all_active'", allowRebalance);
             type = ClusterRebalanceType.INDICES_ALL_ACTIVE;
         }
-        logger.debug("using [cluster.routing.allocation.allow_rebalance] with [{}]", type.toString().toLowerCase());
+        logger.debug("using [cluster.routing.allocation.allow_rebalance] with [{}]", type.toString().toLowerCase(Locale.ROOT));
     }

     @Override
@@ -22,6 +22,7 @@ package org.elasticsearch.cluster.routing.allocation.decider;
 import com.google.common.collect.Lists;

 import java.util.List;
+import java.util.Locale;

 /**
  * This abstract class defining basic {@link Decision} used during shard
@@ -102,7 +103,7 @@ public abstract class Decision {
         if (explanation == null) {
             return type + "()";
         }
-        return type + "(" + String.format(explanation, explanationParams) + ")";
+        return type + "(" + String.format(Locale.ROOT, explanation, explanationParams) + ")";
     }
 }

@@ -1,6 +1,7 @@
 package org.elasticsearch.common;

 import java.nio.charset.Charset;
+import java.util.Locale;

 /**
  * <p>Encodes and decodes to and from Base64 notation.</p>
@@ -907,7 +908,7 @@ public class Base64 {

         if (off + len > source.length) {
             throw new IllegalArgumentException(
-                    String.format("Cannot have offset of %d and length of %d with array of length %d", off, len, source.length));
+                    String.format(Locale.ROOT, "Cannot have offset of %d and length of %d with array of length %d", off, len, source.length));
         } // end if: off < 0

@@ -1050,11 +1051,11 @@ public class Base64 {
             throw new NullPointerException("Destination array was null.");
         } // end if
         if (srcOffset < 0 || srcOffset + 3 >= source.length) {
-            throw new IllegalArgumentException(String.format(
+            throw new IllegalArgumentException(String.format(Locale.ROOT,
                     "Source array with length %d cannot have offset of %d and still process four bytes.", source.length, srcOffset));
         } // end if
         if (destOffset < 0 || destOffset + 2 >= destination.length) {
-            throw new IllegalArgumentException(String.format(
+            throw new IllegalArgumentException(String.format(Locale.ROOT,
                     "Destination array with length %d cannot have offset of %d and still store three bytes.", destination.length, destOffset));
         } // end if

@@ -1160,7 +1161,7 @@ public class Base64 {
             throw new NullPointerException("Cannot decode null source array.");
         } // end if
         if (off < 0 || off + len > source.length) {
-            throw new IllegalArgumentException(String.format(
+            throw new IllegalArgumentException(String.format(Locale.ROOT,
                     "Source array with length %d cannot have offset of %d and process %d bytes.", source.length, off, len));
         } // end if

@@ -1205,7 +1206,7 @@ public class Base64 {
             } // end if: white space, equals sign or better
             else {
                 // There's a bad input character in the Base64 stream.
-                throw new java.io.IOException(String.format(
+                throw new java.io.IOException(String.format(Locale.ROOT,
                         "Bad Base64 input character decimal %d in array position %d", ((int) source[i]) & 0xFF, i));
             } // end else:
         } // each input character
@@ -28,6 +28,8 @@ import java.io.InputStreamReader;
 import java.net.URL;
 import java.util.Random;

+import org.elasticsearch.common.io.Streams;
+
 /**
  *
  */
@@ -36,13 +38,13 @@ public abstract class Names {
     public static String randomNodeName(URL nodeNames) {
         BufferedReader reader = null;
         try {
-            reader = new BufferedReader(new InputStreamReader(nodeNames.openStream()));
+            reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), Streams.UTF8));
             int numberOfNames = 0;
             while (reader.readLine() != null) {
                 numberOfNames++;
             }
             reader.close();
-            reader = new BufferedReader(new InputStreamReader(nodeNames.openStream()));
+            reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), Streams.UTF8));
             int number = ((ThreadLocalRandom.current().nextInt(numberOfNames)) % numberOfNames);
             for (int i = 0; i < number; i++) {
                 reader.readLine();
@@ -66,7 +68,7 @@ public abstract class Names {
             return null;
         }
         try {
-            BufferedReader reader = new BufferedReader(new InputStreamReader(nodeNames));
+            BufferedReader reader = new BufferedReader(new InputStreamReader(nodeNames, Streams.UTF8));
             int numberOfNames = Integer.parseInt(reader.readLine());
             int number = ((new Random().nextInt(numberOfNames)) % numberOfNames) - 2; // remove 2 for last line and first line
             for (int i = 0; i < number; i++) {
@@ -24,6 +24,7 @@ import org.elasticsearch.common.unit.TimeValue;
 import java.text.NumberFormat;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.TimeUnit;

 /**
@@ -228,10 +229,10 @@ public class StopWatch {
         sb.append("-----------------------------------------\n");
         sb.append("ms % Task name\n");
         sb.append("-----------------------------------------\n");
-        NumberFormat nf = NumberFormat.getNumberInstance();
+        NumberFormat nf = NumberFormat.getNumberInstance(Locale.ROOT);
         nf.setMinimumIntegerDigits(5);
         nf.setGroupingUsed(false);
-        NumberFormat pf = NumberFormat.getPercentInstance();
+        NumberFormat pf = NumberFormat.getPercentInstance(Locale.ROOT);
         pf.setMinimumIntegerDigits(3);
         pf.setGroupingUsed(false);
         for (TaskInfo task : taskInfo()) {
@@ -424,8 +424,8 @@ public class Strings {
         if (str.length() < prefix.length()) {
             return false;
         }
-        String lcStr = str.substring(0, prefix.length()).toLowerCase();
-        String lcPrefix = prefix.toLowerCase();
+        String lcStr = str.substring(0, prefix.length()).toLowerCase(Locale.ROOT);
+        String lcPrefix = prefix.toLowerCase(Locale.ROOT);
         return lcStr.equals(lcPrefix);
     }

@@ -448,8 +448,8 @@ public class Strings {
             return false;
         }

-        String lcStr = str.substring(str.length() - suffix.length()).toLowerCase();
-        String lcSuffix = suffix.toLowerCase();
+        String lcStr = str.substring(str.length() - suffix.length()).toLowerCase(Locale.ROOT);
+        String lcSuffix = suffix.toLowerCase(Locale.ROOT);
         return lcStr.equals(lcSuffix);
     }

@@ -21,6 +21,7 @@ import org.elasticsearch.common.inject.internal.InternalFactory;
 import org.elasticsearch.common.inject.internal.Scoping;

 import java.lang.annotation.Annotation;
+import java.util.Locale;

 /**
  * Built-in scope implementations.
@@ -61,7 +62,7 @@ public class Scopes {
             }

             public String toString() {
-                return String.format("%s[%s]", creator, SINGLETON);
+                return String.format(Locale.ROOT, "%s[%s]", creator, SINGLETON);
             }
         };
     }
@@ -430,7 +430,7 @@ public final class Errors implements Serializable {
         for (int i = 0; i < arguments.length; i++) {
             arguments[i] = Errors.convert(arguments[i]);
         }
-        return String.format(messageFormat, arguments);
+        return String.format(Locale.ROOT, messageFormat, arguments);
     }

     public List<Message> getMessages() {
@@ -452,36 +452,41 @@ public final class Errors implements Serializable {
      * Returns the formatted message for an exception with the specified messages.
      */
     public static String format(String heading, Collection<Message> errorMessages) {
-        Formatter fmt = new Formatter().format(heading).format(":%n%n");
-        int index = 1;
-        boolean displayCauses = getOnlyCause(errorMessages) == null;
-
-        for (Message errorMessage : errorMessages) {
-            fmt.format("%s) %s%n", index++, errorMessage.getMessage());
-
-            List<Object> dependencies = errorMessage.getSources();
-            for (int i = dependencies.size() - 1; i >= 0; i--) {
-                Object source = dependencies.get(i);
-                formatSource(fmt, source);
-            }
-
-            Throwable cause = errorMessage.getCause();
-            if (displayCauses && cause != null) {
-                StringWriter writer = new StringWriter();
-                cause.printStackTrace(new PrintWriter(writer));
-                fmt.format("Caused by: %s", writer.getBuffer());
-            }
-
-            fmt.format("%n");
-        }
-
-        if (errorMessages.size() == 1) {
-            fmt.format("1 error");
-        } else {
-            fmt.format("%s errors", errorMessages.size());
-        }
-
-        return fmt.toString();
+        final Formatter fmt = new Formatter(Locale.ROOT);
+        try {
+            fmt.format(heading).format(":%n%n");
+            int index = 1;
+            boolean displayCauses = getOnlyCause(errorMessages) == null;
+
+            for (Message errorMessage : errorMessages) {
+                fmt.format("%s) %s%n", index++, errorMessage.getMessage());
+
+                List<Object> dependencies = errorMessage.getSources();
+                for (int i = dependencies.size() - 1; i >= 0; i--) {
+                    Object source = dependencies.get(i);
+                    formatSource(fmt, source);
+                }
+
+                Throwable cause = errorMessage.getCause();
+                if (displayCauses && cause != null) {
+                    StringWriter writer = new StringWriter();
+                    cause.printStackTrace(new PrintWriter(writer));
+                    fmt.format("Caused by: %s", writer.getBuffer());
+                }
+
+                fmt.format("%n");
+            }
+
+            if (errorMessages.size() == 1) {
+                fmt.format("1 error");
+            } else {
+                fmt.format("%s errors", errorMessages.size());
+            }
+
+            return fmt.toString();
+        } finally {
+            fmt.close();
+        }
     }

     /**
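Two things change in the method above: the Formatter is pinned to Locale.ROOT (format() is otherwise locale-sensitive), and it is now closed deterministically. A small sketch of the same pattern in isolation (illustrative only):

    import java.util.Formatter;
    import java.util.Locale;

    public class FormatterExample {
        public static void main(String[] args) {
            // new Formatter() uses the default locale: on a German JVM "%f"
            // would print "1,500000". Locale.ROOT pins the output.
            Formatter fmt = new Formatter(Locale.ROOT);
            try {
                fmt.format("%d error%s, pi=%f", 2, "s", 1.5);
                System.out.println(fmt); // 2 errors, pi=1.500000
            } finally {
                fmt.close();
            }
        }
    }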
@@ -171,7 +171,7 @@ public class FastByteArrayOutputStream extends OutputStream implements BytesStre
      * @since JDK1.1
      */
     public String toString() {
-        return new String(buf, 0, count);
+        return new String(buf, 0, count, Streams.UTF8);
     }

     /**
@@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.CachedStreamOutput;

 import java.io.*;
+import java.nio.charset.Charset;

 /**
  * Simple utility methods for file and stream copying.
@@ -34,6 +35,8 @@ import java.io.*;
  * but also useful for application code.
  */
 public abstract class Streams {

+    public static final Charset UTF8 = Charset.forName("UTF-8");
+
     public static final int BUFFER_SIZE = 1024 * 8;

@@ -252,7 +255,7 @@ public abstract class Streams {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath with class loader [" + classLoader + "]");
         }
-        return copyToString(new InputStreamReader(is, "UTF-8"));
+        return copyToString(new InputStreamReader(is, UTF8));
     }

     public static String copyToStringFromClasspath(String path) throws IOException {
@@ -260,7 +263,7 @@ public abstract class Streams {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
         }
-        return copyToString(new InputStreamReader(is));
+        return copyToString(new InputStreamReader(is, UTF8));
     }

     public static byte[] copyToBytesFromClasspath(String path) throws IOException {
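A shared Charset constant like the UTF8 field added above has a practical advantage over the String-based overloads used before: the Charset overloads declare no checked UnsupportedEncodingException and skip the per-call charset lookup. A usage sketch with a hypothetical call site:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.nio.charset.Charset;

    public class Utf8ReaderExample {
        // Mirrors the Streams.UTF8 constant introduced above.
        static final Charset UTF8 = Charset.forName("UTF-8");

        public static void main(String[] args) {
            InputStream in = new ByteArrayInputStream("héllo".getBytes(UTF8));
            // Unlike new InputStreamReader(in, "UTF-8"), the Charset overload
            // cannot throw UnsupportedEncodingException.
            Reader reader = new InputStreamReader(in, UTF8);
        }
    }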
@@ -19,6 +19,8 @@

 package org.elasticsearch.common.jna;

+import java.util.Locale;
+
 import com.sun.jna.Native;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
@@ -42,11 +44,11 @@ public class Natives {
         }

         if (errno != Integer.MIN_VALUE) {
-            if (errno == CLibrary.ENOMEM && System.getProperty("os.name").toLowerCase().contains("linux")) {
+            if (errno == CLibrary.ENOMEM && System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux")) {
                 logger.warn("Unable to lock JVM memory (ENOMEM)."
                         + " This can result in part of the JVM being swapped out."
                         + " Increase RLIMIT_MEMLOCK or run elasticsearch as root.");
-            } else if (!System.getProperty("os.name").toLowerCase().contains("mac")) {
+            } else if (!System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("mac")) {
                 // OS X allows mlockall to be called, but always returns an error
                 logger.warn("Unknown mlockall error " + errno);
             }
@@ -19,11 +19,13 @@

 package org.elasticsearch.common.lucene;

+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;

 import java.io.OutputStream;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;

 /**
  * A {@link java.io.PrintStream} that logs each {@link #println(String)} into a logger
@@ -43,7 +45,12 @@ public class LoggerInfoStream extends PrintStream {
      * by appending to its <tt>NAME</tt> the {@link #SUFFIX}.
      */
     public static LoggerInfoStream getInfoStream(ESLogger logger) {
-        return new LoggerInfoStream(Loggers.getLogger(logger, SUFFIX));
+        try {
+            return new LoggerInfoStream(Loggers.getLogger(logger, SUFFIX));
+        } catch (UnsupportedEncodingException e) {
+            // no UTF-8 ?
+            throw new RuntimeException(e);
+        }
     }

     /**
@@ -51,7 +58,12 @@ public class LoggerInfoStream extends PrintStream {
      * by appending to it the {@link #SUFFIX}.
      */
     public static LoggerInfoStream getInfoStream(String name) {
-        return new LoggerInfoStream(Loggers.getLogger(name + SUFFIX));
+        try {
+            return new LoggerInfoStream(Loggers.getLogger(name + SUFFIX));
+        } catch (UnsupportedEncodingException e) {
+            // no UTF-8 ?
+            throw new RuntimeException(e);
+        }
     }

     private final ESLogger logger;
@@ -59,9 +71,10 @@ public class LoggerInfoStream extends PrintStream {
     /**
      * Constucts a new instance based on the provided logger. Will output
      * each {@link #println(String)} operation as a trace level.
+     * @throws UnsupportedEncodingException
      */
-    public LoggerInfoStream(ESLogger logger) {
-        super((OutputStream) null);
+    public LoggerInfoStream(ESLogger logger) throws UnsupportedEncodingException {
+        super((OutputStream) null, false, Streams.UTF8.name());
         this.logger = logger;
     }

@@ -322,27 +322,6 @@ public class Lucene {
         }
     }

-    private static final Field segmentReaderSegmentInfoField;
-
-    static {
-        Field segmentReaderSegmentInfoFieldX = null;
-        try {
-            segmentReaderSegmentInfoFieldX = SegmentReader.class.getDeclaredField("si");
-            segmentReaderSegmentInfoFieldX.setAccessible(true);
-        } catch (NoSuchFieldException e) {
-            e.printStackTrace();
-        }
-        segmentReaderSegmentInfoField = segmentReaderSegmentInfoFieldX;
-    }
-
-    public static SegmentInfoPerCommit getSegmentInfo(SegmentReader reader) {
-        try {
-            return (SegmentInfoPerCommit) segmentReaderSegmentInfoField.get(reader);
-        } catch (IllegalAccessException e) {
-            return null;
-        }
-    }
-
     public static class ExistsCollector extends Collector {

         private boolean exists;
@@ -30,6 +30,7 @@ import org.elasticsearch.common.lucene.docset.DocIdSets;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Locale;
 import java.util.Set;

 /**
@@ -175,7 +176,7 @@ public class FiltersFunctionScoreQuery extends Query {

                 // top level score = subquery.score * filter.score (this already has the query boost)
                 float topLevelScore = subQueryExpl.getValue() * sc;
-                Explanation topLevel = new ComplexExplanation(true, topLevelScore, "custom score, score mode [" + scoreMode.toString().toLowerCase() + "]");
+                Explanation topLevel = new ComplexExplanation(true, topLevelScore, "custom score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]");
                 topLevel.addDetail(subQueryExpl);
                 topLevel.addDetail(filterExplanation);
                 return topLevel;
@@ -230,7 +231,7 @@ public class FiltersFunctionScoreQuery extends Query {
                 factor = maxBoost;
             }
             float sc = factor * subQueryExpl.getValue() * getBoost();
-            Explanation res = new ComplexExplanation(true, sc, "custom score, score mode [" + scoreMode.toString().toLowerCase() + "]");
+            Explanation res = new ComplexExplanation(true, sc, "custom score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]");
             res.addDetail(subQueryExpl);
             for (Explanation explanation : filtersExplanations) {
                 res.addDetail(explanation);
@@ -21,12 +21,16 @@ package org.elasticsearch.common.math;

 import java.io.EOFException;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.PrintWriter;
+import java.io.UnsupportedEncodingException;
 import java.text.MessageFormat;
 import java.text.ParseException;
 import java.util.*;

+import org.elasticsearch.common.io.Streams;
+
 /**
  * Base class for commons-math unchecked exceptions.
  *
@@ -193,7 +197,7 @@ public class MathRuntimeException extends RuntimeException {
     @Override
     public void printStackTrace(final PrintStream out) {
         synchronized (out) {
-            PrintWriter pw = new PrintWriter(out, false);
+            PrintWriter pw = new PrintWriter(new OutputStreamWriter(out, Streams.UTF8));
             printStackTrace(pw);
             // Flush the PrintWriter before it's GC'ed.
             pw.flush();
@@ -32,6 +32,7 @@ import java.net.NetworkInterface;
 import java.net.UnknownHostException;
 import java.util.Collection;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.TimeUnit;

@@ -154,19 +155,19 @@ public class NetworkService extends AbstractComponent {
         if (host.equals("local")) {
             return NetworkUtils.getLocalAddress();
         } else if (host.startsWith("non_loopback")) {
-            if (host.toLowerCase().endsWith(":ipv4")) {
+            if (host.toLowerCase(Locale.ROOT).endsWith(":ipv4")) {
                 return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv4);
-            } else if (host.toLowerCase().endsWith(":ipv6")) {
+            } else if (host.toLowerCase(Locale.ROOT).endsWith(":ipv6")) {
                 return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv6);
             } else {
                 return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.getIpStackType());
             }
         } else {
             NetworkUtils.StackType stackType = NetworkUtils.getIpStackType();
-            if (host.toLowerCase().endsWith(":ipv4")) {
+            if (host.toLowerCase(Locale.ROOT).endsWith(":ipv4")) {
                 stackType = NetworkUtils.StackType.IPv4;
                 host = host.substring(0, host.length() - 5);
-            } else if (host.toLowerCase().endsWith(":ipv6")) {
+            } else if (host.toLowerCase(Locale.ROOT).endsWith(":ipv6")) {
                 stackType = NetworkUtils.StackType.IPv6;
                 host = host.substring(0, host.length() - 5);
             }
@@ -19,6 +19,8 @@

 package org.elasticsearch.common.os;

+import java.util.Locale;
+
 /**
  *
  */
@@ -31,23 +33,23 @@ public class OsUtils {
     /**
      * True iff running on Linux.
      */
-    public static final boolean LINUX = OS_NAME.trim().toLowerCase().startsWith("linux");
+    public static final boolean LINUX = OS_NAME.trim().toLowerCase(Locale.ROOT).startsWith("linux");
     /**
      * True iff running on Windows.
      */
-    public static final boolean WINDOWS = OS_NAME.trim().toLowerCase().startsWith("windows");
+    public static final boolean WINDOWS = OS_NAME.trim().toLowerCase(Locale.ROOT).startsWith("windows");
     /**
      * True iff running on SunOS.
      */
-    public static final boolean SOLARIS = OS_NAME.trim().toLowerCase().startsWith("sun");
+    public static final boolean SOLARIS = OS_NAME.trim().toLowerCase(Locale.ROOT).startsWith("sun");
     /**
      * True iff running on Mac.
      */
-    public static final boolean MAC = OS_NAME.trim().toLowerCase().startsWith("mac");
+    public static final boolean MAC = OS_NAME.trim().toLowerCase(Locale.ROOT).startsWith("mac");
     /**
      * True iff running on HP.
      */
-    public static final boolean HP = OS_NAME.trim().toLowerCase().startsWith("hp");
+    public static final boolean HP = OS_NAME.trim().toLowerCase(Locale.ROOT).startsWith("hp");


     private OsUtils() {
@@ -341,7 +341,7 @@ public class ImmutableSettings implements Settings {
     }

     private <T> Class<? extends T> loadClass(String prefixValue, String sValue, String suffixClassName, String setting) {
-        String fullClassName = prefixValue + toCamelCase(sValue).toLowerCase() + "." + Strings.capitalize(toCamelCase(sValue)) + suffixClassName;
+        String fullClassName = prefixValue + toCamelCase(sValue).toLowerCase(Locale.ROOT) + "." + Strings.capitalize(toCamelCase(sValue)) + suffixClassName;
         try {
             return (Class<? extends T>) getClassLoader().loadClass(fullClassName);
         } catch (ClassNotFoundException e2) {
@@ -747,7 +747,7 @@ public class ImmutableSettings implements Settings {
         public Builder loadFromStream(String resourceName, InputStream is) throws SettingsException {
             SettingsLoader settingsLoader = SettingsLoaderFactory.loaderFromResource(resourceName);
             try {
-                Map<String, String> loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, "UTF-8")));
+                Map<String, String> loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, Streams.UTF8)));
                 put(loadedSettings);
             } catch (Exception e) {
                 throw new SettingsException("Failed to load settings from [" + resourceName + "]", e);
@@ -19,6 +19,8 @@

 package org.elasticsearch.common.util;

+import java.util.Locale;
+
 /**
  * A GC friendly long[].
  * Allocating large arrays (that are not short-lived) generate fragmentation
@@ -57,7 +59,7 @@ public class BigLongArray {

     public void set(int idx, long value) {
         if (idx < 0 || idx > size)
-            throw new IndexOutOfBoundsException(String.format("%d is not whithin [0, %d)", idx, size));
+            throw new IndexOutOfBoundsException(String.format(Locale.ROOT, "%d is not whithin [0, %d)", idx, size));

         int page = idx / pageSize;
         int pageIdx = idx % pageSize;
@@ -66,7 +68,7 @@ public class BigLongArray {

     public long get(int idx) {
         if (idx < 0 || idx > size)
-            throw new IndexOutOfBoundsException(String.format("%d is not whithin [0, %d)", idx, size));
+            throw new IndexOutOfBoundsException(String.format(Locale.ROOT, "%d is not whithin [0, %d)", idx, size));

         int page = idx / pageSize;
         int pageIdx = idx % pageSize;
@@ -164,7 +164,7 @@ public class Environment {
     }

     public String resolveConfigAndLoadToString(String path) throws FailedToResolveConfigException, IOException {
-        return Streams.copyToString(new InputStreamReader(resolveConfig(path).openStream(), "UTF-8"));
+        return Streams.copyToString(new InputStreamReader(resolveConfig(path).openStream(), Streams.UTF8));
     }

     public URL resolveConfig(String path) throws FailedToResolveConfigException {
@@ -32,6 +32,7 @@ import org.elasticsearch.rest.*;
 import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;

 import static org.elasticsearch.rest.RestStatus.*;
@@ -198,7 +199,7 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
         if (lastDot == -1) {
             return "";
         }
-        String extension = path.substring(lastDot + 1).toLowerCase();
+        String extension = path.substring(lastDot + 1).toLowerCase(Locale.ROOT);
         String mimeType = DEFAULT_MIME_TYPES.get(extension);
         if (mimeType == null) {
             return "";
@@ -244,7 +244,7 @@ public class Analysis {
         try {
             return loadWordList(new InputStreamReader(wordListFile.openStream(), Charsets.UTF_8), "#");
         } catch (IOException ioe) {
-            String message = String.format("IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
+            String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
             throw new ElasticSearchIllegalArgumentException(message);
         }
     }
@@ -292,7 +292,7 @@ public class Analysis {
         try {
             reader = new InputStreamReader(fileUrl.openStream(), Charsets.UTF_8);
         } catch (IOException ioe) {
-            String message = String.format("IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
+            String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
             throw new ElasticSearchIllegalArgumentException(message);
         }

@@ -19,6 +19,8 @@

 package org.elasticsearch.index.analysis;

+import java.util.Locale;
+
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.hunspell.HunspellDictionary;
 import org.apache.lucene.analysis.hunspell.HunspellStemFilter;
@@ -47,7 +49,7 @@ public class HunspellTokenFilterFactory extends AbstractTokenFilterFactory {

         dictionary = hunspellService.getDictionary(locale);
         if (dictionary == null) {
-            throw new ElasticSearchIllegalArgumentException(String.format("Unknown hunspell dictionary for locale [%s]", locale));
+            throw new ElasticSearchIllegalArgumentException(String.format(Locale.ROOT, "Unknown hunspell dictionary for locale [%s]", locale));
         }

         dedup = settings.getAsBoolean("dedup", true);
@@ -1180,7 +1180,7 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine {
         try {
             for (AtomicReaderContext reader : searcher.reader().leaves()) {
                 assert reader.reader() instanceof SegmentReader;
-                SegmentInfoPerCommit info = Lucene.getSegmentInfo((SegmentReader) reader.reader());
+                SegmentInfoPerCommit info = ((SegmentReader) reader.reader()).getSegmentInfo();
                 assert !segments.containsKey(info.info.name);
                 Segment segment = new Segment(info.info.name);
                 segment.search = true;
@@ -33,6 +33,7 @@ import org.elasticsearch.index.shard.AbstractIndexShardComponent;
 import org.elasticsearch.index.shard.ShardId;

 import java.io.IOException;
+import java.util.Locale;
 import java.util.concurrent.TimeUnit;

 /**
@@ -80,8 +81,8 @@ public class ShardSlowLogIndexingService extends AbstractIndexShardComponent {

             String level = settings.get(INDEX_INDEXING_SLOWLOG_LEVEL, ShardSlowLogIndexingService.this.level);
             if (!level.equals(ShardSlowLogIndexingService.this.level)) {
-                ShardSlowLogIndexingService.this.indexLogger.setLevel(level.toUpperCase());
-                ShardSlowLogIndexingService.this.deleteLogger.setLevel(level.toUpperCase());
+                ShardSlowLogIndexingService.this.indexLogger.setLevel(level.toUpperCase(Locale.ROOT));
+                ShardSlowLogIndexingService.this.deleteLogger.setLevel(level.toUpperCase(Locale.ROOT));
                 ShardSlowLogIndexingService.this.level = level;
             }

@@ -103,7 +104,7 @@ public class ShardSlowLogIndexingService extends AbstractIndexShardComponent {
         this.indexDebugThreshold = componentSettings.getAsTime("threshold.index.debug", TimeValue.timeValueNanos(-1)).nanos();
         this.indexTraceThreshold = componentSettings.getAsTime("threshold.index.trace", TimeValue.timeValueNanos(-1)).nanos();

-        this.level = componentSettings.get("level", "TRACE").toUpperCase();
+        this.level = componentSettings.get("level", "TRACE").toUpperCase(Locale.ROOT);

         this.indexLogger = Loggers.getLogger(logger, ".index");
         this.deleteLogger = Loggers.getLogger(logger, ".delete");
@@ -148,7 +148,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
                 } else if (propName.equals("format")) {
                     builder.dateTimeFormatter(parseDateTimeFormatter(propName, propNode));
                 } else if (propName.equals("numeric_resolution")) {
-                    builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase()));
+                    builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT)));
                 } else if (propName.equals("locale")) {
                     builder.locale(parseLocal(propNode.toString()));
                 }
@@ -446,7 +446,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
             builder.field("include_in_all", includeInAll);
         }
         if (timeUnit != Defaults.TIME_UNIT) {
-            builder.field("numeric_resolution", timeUnit.name().toLowerCase());
+            builder.field("numeric_resolution", timeUnit.name().toLowerCase(Locale.ROOT));
         }
         if (dateTimeFormatter.locale() != null) {
             builder.field("locale", dateTimeFormatter.format());
@@ -42,6 +42,7 @@ import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;

 import java.io.IOException;
+import java.util.Locale;
 import java.util.Map;

 import static org.elasticsearch.index.mapper.MapperBuilders.doubleField;
@@ -495,7 +496,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
         builder.startObject(name);
         builder.field("type", CONTENT_TYPE);
         if (pathType != Defaults.PATH_TYPE) {
-            builder.field("path", pathType.name().toLowerCase());
+            builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
         }
         if (enableLatLon != Defaults.ENABLE_LATLON) {
             builder.field("lat_lon", enableLatLon);
@@ -29,6 +29,7 @@ import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;

@@ -298,7 +299,7 @@ public class MultiFieldMapper implements Mapper, AllFieldMapper.IncludeInAll {
         builder.startObject(name);
         builder.field("type", CONTENT_TYPE);
         if (pathType != Defaults.PATH_TYPE) {
-            builder.field("path", pathType.name().toLowerCase());
+            builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
         }

         builder.startObject("fields");
@@ -870,18 +870,18 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
         // inherit the root behavior
         if (this instanceof RootObjectMapper) {
             if (dynamic != Dynamic.TRUE) {
-                builder.field("dynamic", dynamic.name().toLowerCase());
+                builder.field("dynamic", dynamic.name().toLowerCase(Locale.ROOT));
             }
         } else {
             if (dynamic != Defaults.DYNAMIC) {
-                builder.field("dynamic", dynamic.name().toLowerCase());
+                builder.field("dynamic", dynamic.name().toLowerCase(Locale.ROOT));
             }
         }
         if (enabled != Defaults.ENABLED) {
             builder.field("enabled", enabled);
         }
         if (pathType != Defaults.PATH_TYPE) {
-            builder.field("path", pathType.name().toLowerCase());
+            builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
         }
         if (includeInAll != null) {
             builder.field("include_in_all", includeInAll);
@@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
+import java.util.Locale;

 /**
  * A query that executes the query string against a field. It is a simplified
@@ -302,7 +303,7 @@ public class FieldQueryBuilder extends BaseQueryBuilder implements BoostableQuer
         builder.startObject(name);
         builder.field("query", query);
         if (defaultOperator != null) {
-            builder.field("default_operator", defaultOperator.name().toLowerCase());
+            builder.field("default_operator", defaultOperator.name().toLowerCase(Locale.ROOT));
         }
         if (analyzer != null) {
             builder.field("analyzer", analyzer);
@@ -24,6 +24,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
+import java.util.Locale;

 /**
  *
@@ -125,7 +126,7 @@ public class GeoDistanceFilterBuilder extends BaseFilterBuilder {
         }
         builder.field("distance", distance);
         if (geoDistance != null) {
-            builder.field("distance_type", geoDistance.name().toLowerCase());
+            builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
         }
         if (optimizeBbox != null) {
             builder.field("optimize_bbox", optimizeBbox);
@@ -23,6 +23,7 @@ import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
+import java.util.Locale;

 /**
  *
@@ -164,7 +165,7 @@ public class GeoDistanceRangeFilterBuilder extends BaseFilterBuilder {
         builder.field("include_lower", includeLower);
         builder.field("include_upper", includeUpper);
         if (geoDistance != null) {
-            builder.field("distance_type", geoDistance.name().toLowerCase());
+            builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
         }
         if (optimizeBbox != null) {
             builder.field("optimize_bbox", optimizeBbox);
@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;

 import static com.google.common.collect.Lists.newArrayList;

@@ -330,7 +331,7 @@ public class QueryStringQueryBuilder extends BaseQueryBuilder implements Boostab
             builder.field("tie_breaker", tieBreaker);
         }
         if (defaultOperator != null) {
-            builder.field("default_operator", defaultOperator.name().toLowerCase());
+            builder.field("default_operator", defaultOperator.name().toLowerCase(Locale.ROOT));
         }
         if (analyzer != null) {
             builder.field("analyzer", analyzer);
@@ -19,6 +19,8 @@

 package org.elasticsearch.index.query;

+import java.util.Locale;
+
 import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
 import org.elasticsearch.common.Strings;
@@ -118,7 +120,7 @@ public enum RegexpFlag {
                 continue;
             }
             try {
-                RegexpFlag flag = RegexpFlag.valueOf(s.toUpperCase());
+                RegexpFlag flag = RegexpFlag.valueOf(s.toUpperCase(Locale.ROOT));
                 if (flag == RegexpFlag.NONE) {
                     continue;
                 }
@@ -32,6 +32,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.NumberComparatorBase;
 import org.elasticsearch.index.fielddata.fieldcomparator.SortMode;

 import java.io.IOException;
+import java.util.Locale;

 /**
  */
@@ -64,7 +65,7 @@ public class NestedFieldComparatorSource extends IndexFieldData.XFieldComparator
                 return new NestedFieldComparator.Avg((NumberComparatorBase) wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, numHits);
             default:
                 throw new ElasticSearchIllegalArgumentException(
-                        String.format("Unsupported sort_mode[%s] for nested type", sortMode)
+                        String.format(Locale.ROOT, "Unsupported sort_mode[%s] for nested type", sortMode)
                 );
         }
     }
@@ -33,6 +33,7 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
+import java.util.Locale;
 import java.util.concurrent.TimeUnit;

 /**
@@ -106,8 +107,8 @@ public class ShardSlowLogSearchService extends AbstractIndexShardComponent {

             String level = settings.get(INDEX_SEARCH_SLOWLOG_LEVEL, ShardSlowLogSearchService.this.level);
             if (!level.equals(ShardSlowLogSearchService.this.level)) {
-                ShardSlowLogSearchService.this.queryLogger.setLevel(level.toUpperCase());
-                ShardSlowLogSearchService.this.fetchLogger.setLevel(level.toUpperCase());
+                ShardSlowLogSearchService.this.queryLogger.setLevel(level.toUpperCase(Locale.ROOT));
+                ShardSlowLogSearchService.this.fetchLogger.setLevel(level.toUpperCase(Locale.ROOT));
                 ShardSlowLogSearchService.this.level = level;
             }

@@ -134,7 +135,7 @@ public class ShardSlowLogSearchService extends AbstractIndexShardComponent {
         this.fetchDebugThreshold = componentSettings.getAsTime("threshold.fetch.debug", TimeValue.timeValueNanos(-1)).nanos();
         this.fetchTraceThreshold = componentSettings.getAsTime("threshold.fetch.trace", TimeValue.timeValueNanos(-1)).nanos();

-        this.level = componentSettings.get("level", "TRACE").toUpperCase();
+        this.level = componentSettings.get("level", "TRACE").toUpperCase(Locale.ROOT);

         this.queryLogger = Loggers.getLogger(logger, ".query");
         this.fetchLogger = Loggers.getLogger(logger, ".fetch");
@@ -34,6 +34,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.FastByteArrayOutputStream;
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.common.metrics.MeanMetric;
@@ -840,7 +841,7 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
         }
         CheckIndex checkIndex = new CheckIndex(store.directory());
         FastByteArrayOutputStream os = new FastByteArrayOutputStream();
-        PrintStream out = new PrintStream(os);
+        PrintStream out = new PrintStream(os, false, Streams.UTF8.name());
         checkIndex.setInfoStream(out);
         out.flush();
         CheckIndex.Status status = checkIndex.checkIndex();
@@ -37,6 +37,7 @@ import java.net.MalformedURLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;

 /**
@@ -153,7 +154,7 @@ public class HunspellService extends AbstractComponent {
         }
         File dicDir = new File(hunspellDir, locale);
         if (!dicDir.exists() || !dicDir.isDirectory()) {
-            throw new ElasticSearchException(String.format("Could not find hunspell dictionary [%s]", locale));
+            throw new ElasticSearchException(String.format(Locale.ROOT, "Could not find hunspell dictionary [%s]", locale));
         }

         // merging node settings with hunspell dictionary specific settings
@@ -164,7 +165,7 @@ public class HunspellService extends AbstractComponent {

         File[] affixFiles = dicDir.listFiles(AFFIX_FILE_FILTER);
         if (affixFiles.length != 1) {
-            throw new ElasticSearchException(String.format("Missing affix file for hunspell dictionary [%s]", locale));
+            throw new ElasticSearchException(String.format(Locale.ROOT, "Missing affix file for hunspell dictionary [%s]", locale));
         }
         InputStream affixStream = null;

@@ -217,7 +218,7 @@ public class HunspellService extends AbstractComponent {
         try {
             return ImmutableSettings.settingsBuilder().loadFromUrl(file.toURI().toURL()).put(defaults).build();
         } catch (MalformedURLException e) {
-            throw new ElasticSearchException(String.format("Could not load hunspell dictionary settings from [%s]", file.getAbsolutePath()), e);
+            throw new ElasticSearchException(String.format(Locale.ROOT, "Could not load hunspell dictionary settings from [%s]", file.getAbsolutePath()), e);
         }
     }

@@ -226,7 +227,7 @@ public class HunspellService extends AbstractComponent {
         try {
             return ImmutableSettings.settingsBuilder().loadFromUrl(file.toURI().toURL()).put(defaults).build();
         } catch (MalformedURLException e) {
-            throw new ElasticSearchException(String.format("Could not load hunspell dictionary settings from [%s]", file.getAbsolutePath()), e);
+            throw new ElasticSearchException(String.format(Locale.ROOT, "Could not load hunspell dictionary settings from [%s]", file.getAbsolutePath()), e);
         }
     }

@@ -239,7 +240,7 @@ public class HunspellService extends AbstractComponent {
     static class DictionaryFileFilter implements FilenameFilter {
         @Override
         public boolean accept(File dir, String name) {
-            return name.toLowerCase().endsWith(".dic");
+            return name.toLowerCase(Locale.ROOT).endsWith(".dic");
         }
     }

@@ -249,7 +250,7 @@ public class HunspellService extends AbstractComponent {
     static class AffixFileFilter implements FilenameFilter {
         @Override
        public boolean accept(File dir, String name) {
-            return name.toLowerCase().endsWith(".aff");
+            return name.toLowerCase(Locale.ROOT).endsWith(".aff");
         }
     }

@@ -21,6 +21,7 @@ package org.elasticsearch.monitor.dump;

 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.io.Streams;

 import java.io.*;
 import java.util.ArrayList;
@@ -88,10 +89,6 @@ public abstract class AbstractDump implements Dump {

     @Override
     public Writer createFileWriter(String name) throws DumpException {
-        try {
-            return new FileWriter(createFile(name));
-        } catch (IOException e) {
-            throw new DumpException("Failed to create file [" + name + "]", e);
-        }
+        return new OutputStreamWriter(createFileOutputStream(name), Streams.UTF8);
     }
 }
@@ -31,13 +31,14 @@ import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Date;
+import java.util.Locale;

 /**
  *
  */
 public class SummaryDumpContributor implements DumpContributor {

-    private final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
+    private final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS", Locale.ROOT);
     private final Object formatterLock = new Object();

     public static final String SUMMARY = "summary";
@@ -31,6 +31,7 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.MonitorInfo;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
+import java.util.Locale;

 /**
  *
@@ -103,11 +104,11 @@ public class ThreadDumpContributor implements DumpContributor {
     }

     private void write(ThreadInfo threadInfo, PrintWriter writer) {
-        writer.print(String.format("\"%s\" Id=%s %s", threadInfo.getThreadName(), threadInfo.getThreadId(), threadInfo.getThreadState()));
+        writer.print(String.format(Locale.ROOT, "\"%s\" Id=%s %s", threadInfo.getThreadName(), threadInfo.getThreadId(), threadInfo.getThreadState()));
         if (threadInfo.getLockName() != null) {
-            writer.print(String.format(" on %s", threadInfo.getLockName()));
+            writer.print(String.format(Locale.ROOT, " on %s", threadInfo.getLockName()));
             if (threadInfo.getLockOwnerName() != null)
-                writer.print(String.format(" owned by \"%s\" Id=%s", threadInfo.getLockOwnerName(), threadInfo.getLockOwnerId()));
+                writer.print(String.format(Locale.ROOT, " owned by \"%s\" Id=%s", threadInfo.getLockOwnerName(), threadInfo.getLockOwnerId()));
         }
         if (threadInfo.isInNative())
             writer.println(" (in native)");
@@ -163,7 +163,7 @@ public class HotThreads {
                 time = hotties.get(t).blockedTime;
             }
             double percent = (((double) time) / interval.nanos()) * 100;
-            sb.append(String.format("%n%4.1f%% (%s out of %s) %s usage by thread '%s'%n", percent, TimeValue.timeValueNanos(time), interval, type, allInfos[0][t].getThreadName()));
+            sb.append(String.format(Locale.ROOT, "%n%4.1f%% (%s out of %s) %s usage by thread '%s'%n", percent, TimeValue.timeValueNanos(time), interval, type, allInfos[0][t].getThreadName()));
             // for each snapshot (2nd array index) find later snapshot for same thread with max number of
             // identical StackTraceElements (starting from end of each)
             boolean[] done = new boolean[threadElementsSnapshotCount];

@@ -190,14 +190,14 @@ public class HotThreads {
                 }
                 StackTraceElement[] show = allInfos[i][t].getStackTrace();
                 if (count == 1) {
-                    sb.append(String.format("  unique snapshot%n"));
+                    sb.append(String.format(Locale.ROOT, "  unique snapshot%n"));
                     for (int l = 0; l < show.length; l++) {
-                        sb.append(String.format("    %s%n", show[l]));
+                        sb.append(String.format(Locale.ROOT, "    %s%n", show[l]));
                     }
                 } else {
-                    sb.append(String.format("  %d/%d snapshots sharing following %d elements%n", count, threadElementsSnapshotCount, maxSim));
+                    sb.append(String.format(Locale.ROOT, "  %d/%d snapshots sharing following %d elements%n", count, threadElementsSnapshotCount, maxSim));
                     for (int l = show.length - maxSim; l < show.length; l++) {
-                        sb.append(String.format("    %s%n", show[l]));
+                        sb.append(String.format(Locale.ROOT, "    %s%n", show[l]));
                     }
                 }
             }
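String.format without a locale argument also formats numbers with the default locale's symbols, so the CPU percentages above would be printed with a decimal comma on, say, a German JVM — which matters for anything that later parses this output. A small sketch:

    import java.util.Locale;

    public class FormatLocaleDemo {
        public static void main(String[] args) {
            double percent = 42.5;
            System.out.println(String.format(Locale.GERMANY, "%4.1f%%", percent)); // "42,5%"
            System.out.println(String.format(Locale.ROOT, "%4.1f%%", percent));    // "42.5%"
        }
    }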
@@ -31,6 +31,7 @@ import org.elasticsearch.rest.action.support.RestActions;
 import org.elasticsearch.rest.action.support.RestXContentBuilder;

 import java.io.IOException;
+import java.util.Locale;

 import static org.elasticsearch.client.Requests.clusterHealthRequest;
 import static org.elasticsearch.rest.RestStatus.PRECONDITION_FAILED;

@@ -59,7 +60,7 @@ public class RestClusterHealthAction extends BaseRestHandler {
         clusterHealthRequest.timeout(request.paramAsTime("timeout", clusterHealthRequest.timeout()));
         String waitForStatus = request.param("wait_for_status");
         if (waitForStatus != null) {
-            clusterHealthRequest.waitForStatus(ClusterHealthStatus.valueOf(waitForStatus.toUpperCase()));
+            clusterHealthRequest.waitForStatus(ClusterHealthStatus.valueOf(waitForStatus.toUpperCase(Locale.ROOT)));
         }
         clusterHealthRequest.waitForRelocatingShards(request.paramAsInt("wait_for_relocating_shards", clusterHealthRequest.waitForRelocatingShards()));
         clusterHealthRequest.waitForActiveShards(request.paramAsInt("wait_for_active_shards", clusterHealthRequest.waitForActiveShards()));

@@ -98,7 +99,7 @@ public class RestClusterHealthAction extends BaseRestHandler {
                 builder.startObject();

                 builder.field(Fields.CLUSTER_NAME, response.getClusterName());
-                builder.field(Fields.STATUS, response.getStatus().name().toLowerCase());
+                builder.field(Fields.STATUS, response.getStatus().name().toLowerCase(Locale.ROOT));
                 builder.field(Fields.TIMED_OUT, response.isTimedOut());
                 builder.field(Fields.NUMBER_OF_NODES, response.getNumberOfNodes());
                 builder.field(Fields.NUMBER_OF_DATA_NODES, response.getNumberOfDataNodes());

@@ -138,7 +139,7 @@ public class RestClusterHealthAction extends BaseRestHandler {
                 for (ClusterIndexHealth indexHealth : response) {
                     builder.startObject(indexHealth.getIndex(), XContentBuilder.FieldCaseConversion.NONE);

-                    builder.field(Fields.STATUS, indexHealth.getStatus().name().toLowerCase());
+                    builder.field(Fields.STATUS, indexHealth.getStatus().name().toLowerCase(Locale.ROOT));
                     builder.field(Fields.NUMBER_OF_SHARDS, indexHealth.getNumberOfShards());
                     builder.field(Fields.NUMBER_OF_REPLICAS, indexHealth.getNumberOfReplicas());
                     builder.field(Fields.ACTIVE_PRIMARY_SHARDS, indexHealth.getActivePrimaryShards());

@@ -161,7 +162,7 @@ public class RestClusterHealthAction extends BaseRestHandler {
                     for (ClusterShardHealth shardHealth : indexHealth) {
                         builder.startObject(Integer.toString(shardHealth.getId()));

-                        builder.field(Fields.STATUS, shardHealth.getStatus().name().toLowerCase());
+                        builder.field(Fields.STATUS, shardHealth.getStatus().name().toLowerCase(Locale.ROOT));
                         builder.field(Fields.PRIMARY_ACTIVE, shardHealth.isPrimaryActive());
                         builder.field(Fields.ACTIVE_SHARDS, shardHealth.getActiveShards());
                         builder.field(Fields.RELOCATING_SHARDS, shardHealth.getRelocatingShards());
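The REST layer round-trips enum constants through strings, and Enum.valueOf is exact-match: the upper-cased request parameter must reproduce the constant name no matter where the node runs. The pattern used above, sketched with a stand-in enum:

    import java.util.Locale;

    public class EnumRoundTripDemo {
        enum Status { GREEN, YELLOW, RED } // stand-in for ClusterHealthStatus

        public static void main(String[] args) {
            String param = "yellow"; // e.g. ?wait_for_status=yellow
            // Locale.ROOT guarantees "yellow" -> "YELLOW" on every JVM:
            Status status = Status.valueOf(param.toUpperCase(Locale.ROOT));
            // ...and the response writes it back in locale-neutral lower case:
            System.out.println(status.name().toLowerCase(Locale.ROOT)); // "yellow"
        }
    }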
@@ -28,6 +28,7 @@ import org.elasticsearch.common.inject.SpawnModules;
 import org.elasticsearch.common.settings.NoClassSettingsException;
 import org.elasticsearch.common.settings.Settings;

+import java.util.Locale;
 import java.util.Map;

 import static org.elasticsearch.common.Strings.toCamelCase;

@@ -79,7 +80,7 @@ public class RiverModule extends AbstractModule implements SpawnModules {
             try {
                 return (Class<? extends Module>) globalSettings.getClassLoader().loadClass(fullClassName);
             } catch (ClassNotFoundException e2) {
-                fullClassName = prefixPackage + toCamelCase(type).toLowerCase() + "." + Strings.capitalize(toCamelCase(type)) + suffixClassName;
+                fullClassName = prefixPackage + toCamelCase(type).toLowerCase(Locale.ROOT) + "." + Strings.capitalize(toCamelCase(type)) + suffixClassName;
                 try {
                     return (Class<? extends Module>) globalSettings.getClassLoader().loadClass(fullClassName);
                 } catch (ClassNotFoundException e3) {
@@ -121,7 +121,7 @@ public class ScriptService extends AbstractComponent {
                 if (s.equals(ext)) {
                     found = true;
                     try {
-                        String script = Streams.copyToString(new InputStreamReader(new FileInputStream(file), "UTF-8"));
+                        String script = Streams.copyToString(new InputStreamReader(new FileInputStream(file), Streams.UTF8));
                         staticCache.put(scriptName, new CompiledScript(engineService.types()[0], engineService.compile(script)));
                     } catch (Exception e) {
                         logger.warn("failed to load/compile script [{}]", e, scriptName);
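Note the shift from the charset name "UTF-8" — a string that can be mistyped and forces a checked UnsupportedEncodingException — to a shared Charset constant. Streams.UTF8 is presumably defined along these lines (its definition is not shown in this diff); StandardCharsets.UTF_8 would be the modern spelling, but it only arrived in Java 7 and this build targets 1.6:

    import java.io.*;
    import java.nio.charset.Charset;

    public class CharsetConstantDemo {
        // Assumed shape of a Streams.UTF8-style constant:
        public static final Charset UTF8 = Charset.forName("UTF-8");

        public static void main(String[] args) throws IOException {
            byte[] bytes = "ctx._source".getBytes(UTF8);
            // The Charset overload throws no checked UnsupportedEncodingException,
            // unlike new InputStreamReader(in, "UTF-8"):
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(new ByteArrayInputStream(bytes), UTF8));
            System.out.println(reader.readLine()); // ctx._source
            reader.close();
        }
    }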
@@ -30,6 +30,7 @@ import org.elasticsearch.search.facet.FacetBuilder;

 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;

 /**

@@ -266,7 +267,7 @@ public class GeoDistanceFacetBuilder extends FacetBuilder {
             builder.field("unit", unit);
         }
         if (geoDistance != null) {
-            builder.field("distance_type", geoDistance.name().toLowerCase());
+            builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
         }

         builder.endObject();
@@ -27,6 +27,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilderException;
 import org.elasticsearch.search.facet.FacetBuilder;

 import java.io.IOException;
+import java.util.Locale;
 import java.util.Map;

 /**

@@ -226,7 +227,7 @@ public class TermsFacetBuilder extends FacetBuilder {
             }
         }
         if (comparatorType != null) {
-            builder.field("order", comparatorType.name().toLowerCase());
+            builder.field("order", comparatorType.name().toLowerCase(Locale.ROOT));
         }
         if (allTerms != null) {
             builder.field("all_terms", allTerms);
@@ -25,6 +25,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilderException;
 import org.elasticsearch.search.facet.FacetBuilder;

 import java.io.IOException;
+import java.util.Locale;
 import java.util.Map;

 /**

@@ -139,7 +140,7 @@ public class TermsStatsFacetBuilder extends FacetBuilder {
         }

         if (comparatorType != null) {
-            builder.field("order", comparatorType.name().toLowerCase());
+            builder.field("order", comparatorType.name().toLowerCase(Locale.ROOT));
         }

         if (size != -1) {
@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.FilterBuilder;

 import java.io.IOException;
+import java.util.Locale;

 /**
  * A geo distance based sorting on a geo point like field.

@@ -147,7 +148,7 @@ public class GeoDistanceSortBuilder extends SortBuilder {
             builder.field("unit", unit);
         }
         if (geoDistance != null) {
-            builder.field("distance_type", geoDistance.name().toLowerCase());
+            builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
         }
         if (order == SortOrder.DESC) {
             builder.field("reverse", true);
@@ -96,7 +96,7 @@ public class HttpClient {
         InputStream inputStream = urlConnection.getInputStream();
         String body = null;
         try {
-            body = Streams.copyToString(new InputStreamReader(inputStream));
+            body = Streams.copyToString(new InputStreamReader(inputStream, Streams.UTF8));
         } catch (IOException e1) {
             throw new ElasticSearchException("problem reading error stream", e1);
         }

@@ -105,7 +105,7 @@ public class HttpClient {
         InputStream errStream = urlConnection.getErrorStream();
         String body = null;
         try {
-            body = Streams.copyToString(new InputStreamReader(errStream));
+            body = Streams.copyToString(new InputStreamReader(errStream, Streams.UTF8));
         } catch (IOException e1) {
             throw new ElasticSearchException("problem reading error stream", e1);
         }
@@ -40,6 +40,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;

 import static com.google.common.collect.Maps.newHashMap;

@@ -787,13 +788,13 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
         int numberOfParents = 4;
         int numberOfChildrenPerParent = 123;
         for (int i = 1; i <= numberOfParents; i++) {
-            String parentId = String.format("p%d", i);
+            String parentId = String.format(Locale.ROOT, "p%d", i);
             client.prepareIndex("test", "parent", parentId)
-                    .setSource("p_field", String.format("p_value%d", i))
+                    .setSource("p_field", String.format(Locale.ROOT, "p_value%d", i))
                     .execute()
                     .actionGet();
             for (int j = 1; j <= numberOfChildrenPerParent; j++) {
-                client.prepareIndex("test", "child", String.format("%s_c%d", parentId, j))
+                client.prepareIndex("test", "child", String.format(Locale.ROOT, "%s_c%d", parentId, j))
                         .setSource(
                                 "c_field1", parentId,
                                 "c_field2", i % 2 == 0 ? "even" : "not_even"

@@ -1233,13 +1234,13 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
         // index simple data
         int childId = 0;
         for (int i = 0; i < 10; i++) {
-            String parentId = String.format("p%03d", i);
+            String parentId = String.format(Locale.ROOT, "p%03d", i);
             client.prepareIndex("test", "parent", parentId)
                     .setSource("p_field", parentId)
                     .execute().actionGet();
             int j = childId;
             for (; j < childId + 50; j++) {
-                String childUid = String.format("c%03d", j);
+                String childUid = String.format(Locale.ROOT, "c%03d", j);
                 client.prepareIndex("test", "child", childUid)
                         .setSource("c_field", childUid)
                         .setParent(parentId)
@@ -211,7 +211,7 @@ public class ExtendedFacetsTests extends AbstractNodesTests {
                     TermsFacet actualFacetEntries = response.getFacets().facet("facet1");

                     List<Tuple<Text, Integer>> expectedFacetEntries = getExpectedFacetEntries(allFieldValues, queryControlFacets, size, compType, excludes, regex, allTerms);
-                    String reason = String.format("query: [%s] field: [%s] size: [%d] order: [%s] all_terms: [%s] fields: [%s] regex: [%s] excludes: [%s]", queryVal, facetField, size, compType, allTerms, useFields, regex, excludes);
+                    String reason = String.format(Locale.ROOT, "query: [%s] field: [%s] size: [%d] order: [%s] all_terms: [%s] fields: [%s] regex: [%s] excludes: [%s]", queryVal, facetField, size, compType, allTerms, useFields, regex, excludes);
                     assertThat(reason, actualFacetEntries.getEntries().size(), equalTo(expectedFacetEntries.size()));
                     for (int i = 0; i < expectedFacetEntries.size(); i++) {
                         assertThat(reason, actualFacetEntries.getEntries().get(i).getTerm(), equalTo(expectedFacetEntries.get(i).v1()));
@@ -43,6 +43,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.settings.ImmutableSettings.Builder;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -569,7 +570,7 @@ public class SuggestSearchTests extends AbstractNodesTests {

         client.admin().indices().prepareCreate("test").setSettings(builder.build()).addMapping("type1", mapping).execute().actionGet();
         client.admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();
-        BufferedReader reader = new BufferedReader(new InputStreamReader(SuggestSearchTests.class.getResourceAsStream("/config/names.txt")));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(SuggestSearchTests.class.getResourceAsStream("/config/names.txt"), Streams.UTF8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             client.prepareIndex("test", "type1")

@@ -904,7 +905,7 @@ public class SuggestSearchTests extends AbstractNodesTests {

         client.admin().indices().prepareCreate("test").setSettings(builder.build()).addMapping("type1", mapping).execute().actionGet();
         client.admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();
-        BufferedReader reader = new BufferedReader(new InputStreamReader(SuggestSearchTests.class.getResourceAsStream("/config/names.txt")));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(SuggestSearchTests.class.getResourceAsStream("/config/names.txt"), Streams.UTF8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             client.prepareIndex("test", "type1")
@@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRespon
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.geo.GeoDistance;
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentFactory;

@@ -79,7 +80,7 @@ public class SimpleValidateQueryTests extends AbstractNodesTests {

         client.admin().indices().prepareRefresh().execute().actionGet();

-        assertThat(client.admin().indices().prepareValidateQuery("test").setQuery("foo".getBytes()).execute().actionGet().isValid(), equalTo(false));
+        assertThat(client.admin().indices().prepareValidateQuery("test").setQuery("foo".getBytes(Streams.UTF8)).execute().actionGet().isValid(), equalTo(false));
         assertThat(client.admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("_id:1")).execute().actionGet().isValid(), equalTo(true));
         assertThat(client.admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("_i:d:1")).execute().actionGet().isValid(), equalTo(false));

@@ -119,7 +120,7 @@ public class SimpleValidateQueryTests extends AbstractNodesTests {

         ValidateQueryResponse response;
         response = client.admin().indices().prepareValidateQuery("test")
-                .setQuery("foo".getBytes())
+                .setQuery("foo".getBytes(Streams.UTF8))
                 .setExplain(true)
                 .execute().actionGet();
         assertThat(response.isValid(), equalTo(false));

@@ -225,7 +226,7 @@ public class SimpleValidateQueryTests extends AbstractNodesTests {

         ValidateQueryResponse response;
         response = client("node1").admin().indices().prepareValidateQuery("test")
-                .setQuery("foo".getBytes())
+                .setQuery("foo".getBytes(Streams.UTF8))
                 .setExplain(true)
                 .execute().actionGet();
         assertThat(response.isValid(), equalTo(false));

@@ -234,7 +235,7 @@ public class SimpleValidateQueryTests extends AbstractNodesTests {
         assertThat(response.getQueryExplanation().get(0).getExplanation(), nullValue());

         response = client("node2").admin().indices().prepareValidateQuery("test")
-                .setQuery("foo".getBytes())
+                .setQuery("foo".getBytes(Streams.UTF8))
                 .setExplain(true)
                 .execute().actionGet();
         assertThat(response.isValid(), equalTo(false));
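The test fixes follow the same rule: String.getBytes() with no argument encodes with the platform default charset, so the same string literal can yield different bytes — even a different byte count — depending on file.encoding. That is harmless for plain ASCII like "foo", but the check is applied uniformly; a sketch of where it bites:

    import java.nio.charset.Charset;

    public class GetBytesDemo {
        public static void main(String[] args) {
            String s = "Grüße";
            // Depends on file.encoding: 5 bytes under ISO-8859-1, 7 under UTF-8:
            System.out.println(s.getBytes().length);
            // Deterministic on every platform:
            System.out.println(s.getBytes(Charset.forName("UTF-8")).length); // 7
        }
    }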
@@ -24,6 +24,7 @@ import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.io.Streams;
 import org.testng.annotations.Test;

 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;

@@ -37,7 +38,7 @@ public class BulkRequestTests {
     public void testSimpleBulk1() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/test/unit/action/bulk/simple-bulk.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(), 0, bulkAction.length(), true, null, null);
+        bulkRequest.add(bulkAction.getBytes(Streams.UTF8), 0, bulkAction.length(), true, null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(3));
         assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes()));
         assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));

@@ -48,7 +49,7 @@ public class BulkRequestTests {
     public void testSimpleBulk2() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/test/unit/action/bulk/simple-bulk2.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(), 0, bulkAction.length(), true, null, null);
+        bulkRequest.add(bulkAction.getBytes(Streams.UTF8), 0, bulkAction.length(), true, null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(3));
     }

@@ -56,7 +57,7 @@ public class BulkRequestTests {
     public void testSimpleBulk3() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/test/unit/action/bulk/simple-bulk3.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(), 0, bulkAction.length(), true, null, null);
+        bulkRequest.add(bulkAction.getBytes(Streams.UTF8), 0, bulkAction.length(), true, null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(3));
     }

@@ -64,7 +65,7 @@ public class BulkRequestTests {
     public void testSimpleBulk4() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/test/unit/action/bulk/simple-bulk4.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(), 0, bulkAction.length(), true, null, null);
+        bulkRequest.add(bulkAction.getBytes(Streams.UTF8), 0, bulkAction.length(), true, null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(4));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
@@ -19,6 +19,7 @@

 package org.elasticsearch.test.unit.common.io;

+import org.elasticsearch.common.io.Streams;
 import org.testng.annotations.Test;

 import java.io.*;

@@ -37,7 +38,7 @@ public class StreamsTests {

     @Test
     public void testCopyFromInputStream() throws IOException {
-        byte[] content = "content".getBytes();
+        byte[] content = "content".getBytes(Streams.UTF8);
         ByteArrayInputStream in = new ByteArrayInputStream(content);
         ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
         long count = copy(in, out);

@@ -48,7 +49,7 @@ public class StreamsTests {

     @Test
     public void testCopyFromByteArray() throws IOException {
-        byte[] content = "content".getBytes();
+        byte[] content = "content".getBytes(Streams.UTF8);
         ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
         copy(content, out);
         assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));

@@ -56,7 +57,7 @@ public class StreamsTests {

     @Test
     public void testCopyToByteArray() throws IOException {
-        byte[] content = "content".getBytes();
+        byte[] content = "content".getBytes(Streams.UTF8);
         ByteArrayInputStream in = new ByteArrayInputStream(content);
         byte[] result = copyToByteArray(in);
         assertThat(Arrays.equals(content, result), equalTo(true));
@@ -113,7 +113,7 @@ public class XContentBuilderTests {
     public void testDateTypesConversion() throws Exception {
         Date date = new Date();
         String expectedDate = XContentBuilder.defaultDatePrinter.print(date.getTime());
-        Calendar calendar = new GregorianCalendar();
+        Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT);
         String expectedCalendar = XContentBuilder.defaultDatePrinter.print(calendar.getTimeInMillis());
         XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
         builder.startObject().field("date", date).endObject();
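new GregorianCalendar() captures both the default time zone and the default locale: the zone shifts every field derived from a wall-clock instant, and the locale changes week-numbering rules, so the computed expectation is machine-dependent. Pinning both makes the test reproducible; a sketch:

    import java.util.Calendar;
    import java.util.GregorianCalendar;
    import java.util.Locale;
    import java.util.TimeZone;

    public class DeterministicCalendarDemo {
        public static void main(String[] args) {
            // Fixed zone and locale -> identical field values on every machine:
            Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT);
            calendar.setTimeInMillis(0L); // 1970-01-01T00:00:00Z
            System.out.println(calendar.get(Calendar.YEAR));         // 1970
            System.out.println(calendar.get(Calendar.DAY_OF_MONTH)); // 1
        }
    }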
@@ -28,6 +28,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.util.Version;
 import org.elasticsearch.common.inject.Injector;
 import org.elasticsearch.common.inject.ModulesBuilder;
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.settings.Settings;

@@ -46,7 +47,9 @@ import org.testng.annotations.Test;

 import java.io.BufferedWriter;
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.FileWriter;
+import java.io.OutputStreamWriter;
 import java.io.StringReader;
 import java.util.Set;

@@ -169,7 +172,7 @@ public class AnalysisModuleTests {

         BufferedWriter writer = null;
         try {
-            writer = new BufferedWriter(new FileWriter(wordListFile));
+            writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(wordListFile), Streams.UTF8));
             for (String word : words) {
                 writer.write(word);
                 writer.write('\n');
@@ -64,6 +64,7 @@ import org.testng.annotations.Test;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;

 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;

@@ -221,14 +222,14 @@ public class SimpleIdCacheTests {

     private Document doc(String type, String id) {
         Document parent = new Document();
-        parent.add(new StringField(UidFieldMapper.NAME, String.format("%s#%s", type, id), Field.Store.NO));
+        parent.add(new StringField(UidFieldMapper.NAME, String.format(Locale.ROOT, "%s#%s", type, id), Field.Store.NO));
         return parent;
     }

     private Document childDoc(String type, String id, String parentType, String parentId) {
         Document parent = new Document();
-        parent.add(new StringField(UidFieldMapper.NAME, String.format("%s#%s", type, id), Field.Store.NO));
-        parent.add(new StringField(ParentFieldMapper.NAME, String.format("%s#%s", parentType, parentId), Field.Store.NO));
+        parent.add(new StringField(UidFieldMapper.NAME, String.format(Locale.ROOT, "%s#%s", type, id), Field.Store.NO));
+        parent.add(new StringField(ParentFieldMapper.NAME, String.format(Locale.ROOT, "%s#%s", parentType, parentId), Field.Store.NO));
         return parent;
     }
@@ -25,6 +25,7 @@ import static org.hamcrest.Matchers.nullValue;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Random;

@@ -113,10 +114,10 @@ public class DuellFieldDataTest extends AbstractFieldDataTests {
                 right = left = list.remove(0);
             }
             ifdService.clear();
-            IndexFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase()),
+            IndexFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase(Locale.ROOT)),
                     left.getKey());
             ifdService.clear();
-            IndexFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase()),
+            IndexFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase(Locale.ROOT)),
                     right.getKey());
             duellFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
             duellFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

@@ -169,10 +170,10 @@ public class DuellFieldDataTest extends AbstractFieldDataTests {
                 right = left = list.remove(0);
             }
             ifdService.clear();
-            IndexNumericFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase()),
+            IndexNumericFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase(Locale.ROOT)),
                     left.getKey());
             ifdService.clear();
-            IndexNumericFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase()),
+            IndexNumericFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase(Locale.ROOT)),
                     right.getKey());
             duellFieldDataLong(random, context, leftFieldData, rightFieldData);
             duellFieldDataLong(random, context, rightFieldData, leftFieldData);

@@ -223,10 +224,10 @@ public class DuellFieldDataTest extends AbstractFieldDataTests {
                 right = left = list.remove(0);
             }
             ifdService.clear();
-            IndexNumericFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase()),
+            IndexNumericFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase(Locale.ROOT)),
                     left.getKey());
             ifdService.clear();
-            IndexNumericFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase()),
+            IndexNumericFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase(Locale.ROOT)),
                     right.getKey());
             duellFieldDataDouble(random, context, leftFieldData, rightFieldData);
             duellFieldDataDouble(random, context, rightFieldData, leftFieldData);

@@ -283,10 +284,10 @@ public class DuellFieldDataTest extends AbstractFieldDataTests {
                 right = left = list.remove(0);
             }
             ifdService.clear();
-            IndexFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase()),
+            IndexFieldData leftFieldData = ifdService.getForField(new FieldMapper.Names(left.getValue().name().toLowerCase(Locale.ROOT)),
                     left.getKey());
             ifdService.clear();
-            IndexFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase()),
+            IndexFieldData rightFieldData = ifdService.getForField(new FieldMapper.Names(right.getValue().name().toLowerCase(Locale.ROOT)),
                     right.getKey());
             duellFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
             duellFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);
@@ -20,6 +20,8 @@
 package org.elasticsearch.test.unit.index.gateway;

 import com.google.common.collect.Lists;
+
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.index.gateway.CommitPoint;

@@ -52,7 +54,7 @@ public class CommitPointsTests {
         CommitPoint commitPoint = new CommitPoint(1, "test", CommitPoint.Type.GENERATED, indexFiles, translogFiles);

         byte[] serialized = CommitPoints.toXContent(commitPoint);
-        logger.info("serialized commit_point {}", new String(serialized));
+        logger.info("serialized commit_point {}", new String(serialized, Streams.UTF8));

         CommitPoint desCp = CommitPoints.fromXContent(serialized);
         assertThat(desCp.version(), equalTo(commitPoint.version()));
@@ -22,6 +22,7 @@ package org.elasticsearch.test.unit.index.mapper.simple;
 import org.apache.lucene.document.Document;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;

@@ -123,7 +124,7 @@ public class SimpleMapperTests {
                 .add(object("name").add(stringField("first").store(true).index(false)))
         ).build(mapperParser);

-        BytesReference json = new BytesArray("".getBytes());
+        BytesReference json = new BytesArray("".getBytes(Streams.UTF8));
         try {
             docMapper.parse("person", "1", json).rootDoc();
             assertThat("this point is never reached", false);
@@ -19,6 +19,8 @@

 package org.elasticsearch.test.unit.index.mapper.timestamp;

+import java.util.Locale;
+
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;

@@ -128,6 +130,6 @@ public class TimestampMappingTests {
         mapper.timestampFieldMapper().toXContent(builder, null);
         builder.endObject();

-        assertThat(builder.string(), is(String.format("{\"%s\":{}}", TimestampFieldMapper.NAME)));
+        assertThat(builder.string(), is(String.format(Locale.ROOT, "{\"%s\":{}}", TimestampFieldMapper.NAME)));
     }
 }
@@ -50,6 +50,7 @@ import org.apache.lucene.search.spell.SuggestMode;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Version;
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.search.suggest.phrase.CandidateGenerator;
 import org.elasticsearch.search.suggest.phrase.Correction;
 import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator;

@@ -93,7 +94,7 @@ public class NoisyChannelSpellCheckerTests {

         IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_41, wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt")));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Streams.UTF8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();

@@ -204,7 +205,7 @@ public class NoisyChannelSpellCheckerTests {

         IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_41, wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt")));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Streams.UTF8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();

@@ -289,7 +290,7 @@ public class NoisyChannelSpellCheckerTests {

         IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_41, wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt")));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Streams.UTF8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();