mirror of https://github.com/apache/lucene.git
LUCENE-5560: Followup: Cleanup charset handling for Java 7
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1583449 13f79535-47bb-0310-9956-ffa450edef68
commit 6750845f9b
parent 5ba92db7b7
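The single pattern applied across the hunks below is the Java 7 charset cleanup named in the commit title: charset lookups by name ("UTF-8" string literals and Charset.forName("UTF-8")) are replaced with the java.nio.charset.StandardCharsets.UTF_8 constant, which in turn lets the code drop the impossible UnsupportedEncodingException handling. A minimal before/after sketch of the idea (the class and method names here are illustrative, not taken from the patch):

    import java.io.ByteArrayOutputStream;
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    class CharsetCleanupSketch {
      // Before (pre-Java 7 style): the charset is passed by name, so the
      // compiler forces a catch block for an exception that can never
      // happen for UTF-8.
      static String decodeOld(ByteArrayOutputStream bos) {
        try {
          return bos.toString("UTF-8");
        } catch (UnsupportedEncodingException impossible) {
          throw new RuntimeException(impossible);
        }
      }

      // After (Java 7+): the Charset constant is used directly; no checked
      // exception and no charset lookup by name at runtime.
      static String decodeNew(ByteArrayOutputStream bos) {
        return new String(bos.toByteArray(), StandardCharsets.UTF_8);
      }
    }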
@@ -428,7 +428,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
 }

 try {
-return bos.toString("UTF-8");
+return bos.toString(IOUtils.UTF_8);
 } catch (UnsupportedEncodingException bogus) {
 throw new RuntimeException(bogus);
 }
@@ -294,7 +294,7 @@ public final class UnicodeUtil {
 private static boolean matches(char[] source, int offset, int length, byte[] result, int upto) {
 try {
 String s1 = new String(source, offset, length);
-String s2 = new String(result, 0, upto, "UTF-8");
+String s2 = new String(result, 0, upto, StandardCharsets.UTF_8);
 if (!s1.equals(s2)) {
 //System.out.println("DIFF: s1 len=" + s1.length());
 //for(int i=0;i<s1.length();i++)

@@ -320,7 +320,7 @@ public final class UnicodeUtil {
 private static boolean matches(String source, int offset, int length, byte[] result, int upto) {
 try {
 String s1 = source.substring(offset, offset+length);
-String s2 = new String(result, 0, upto, "UTF-8");
+String s2 = new String(result, 0, upto, StandardCharsets.UTF_8);
 if (!s1.equals(s2)) {
 // Allow a difference if s1 is not valid UTF-16
@@ -23,6 +23,7 @@ import com.spatial4j.core.shape.Rectangle;
 import com.spatial4j.core.shape.Shape;

 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;

@@ -41,7 +42,7 @@ import java.util.List;
 */
 public abstract class SpatialPrefixTree {

-protected static final Charset UTF8 = Charset.forName("UTF-8");
+protected static final Charset UTF8 = StandardCharsets.UTF_8;

 protected final int maxLevels;
@@ -24,6 +24,7 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Iterator;

@@ -43,7 +44,7 @@ public class SpatialTestData {
 */
 public static Iterator<SpatialTestData> getTestData(InputStream in, SpatialContext ctx) throws IOException {
 List<SpatialTestData> results = new ArrayList<>();
-BufferedReader bufInput = new BufferedReader(new InputStreamReader(in,"UTF-8"));
+BufferedReader bufInput = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
 try {
 String line;
 while ((line = bufInput.readLine()) != null) {
@@ -18,6 +18,7 @@ package org.apache.lucene.spatial;
 */

 import com.spatial4j.core.context.SpatialContext;

 import org.apache.lucene.spatial.query.SpatialArgs;
 import org.apache.lucene.spatial.query.SpatialArgsParser;

@@ -25,6 +26,7 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;

@@ -51,7 +53,7 @@ public class SpatialTestQuery {

 List<SpatialTestQuery> results = new ArrayList<>();

-BufferedReader bufInput = new BufferedReader(new InputStreamReader(in,"UTF-8"));
+BufferedReader bufInput = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
 try {
 String line;
 for (int lineNumber = 1; (line = bufInput.readLine()) != null; lineNumber++) {
@@ -21,6 +21,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;

@@ -328,7 +329,7 @@ public class FSTTester<T> {
 }

 if (LuceneTestCase.VERBOSE && pairs.size() <= 20 && fst != null) {
-Writer w = new OutputStreamWriter(new FileOutputStream("out.dot"), "UTF-8");
+Writer w = new OutputStreamWriter(new FileOutputStream("out.dot"), StandardCharsets.UTF_8);
 Util.toDot(fst, w, false, false);
 w.close();
 System.out.println("SAVED out.dot");
@@ -18,10 +18,12 @@ package org.apache.solr.handler.dataimport;

 import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
 import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.sql.Blob;
 import java.sql.Clob;
 import java.sql.SQLException;

@@ -106,7 +108,7 @@ public class FieldReaderDataSource extends DataSource<Reader> {
 private Reader getReader(Blob blob)
 throws SQLException, UnsupportedEncodingException {
 if (encoding == null) {
-return (new InputStreamReader(blob.getBinaryStream(), "UTF-8"));
+return (new InputStreamReader(blob.getBinaryStream(), StandardCharsets.UTF_8));
 } else {
 return (new InputStreamReader(blob.getBinaryStream(), encoding));
 }
@@ -17,10 +17,12 @@
 package org.apache.solr.handler.dataimport;

 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
 import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;

@@ -138,7 +140,7 @@ public class FileDataSource extends DataSource<Reader> {
 protected Reader openStream(File file) throws FileNotFoundException,
 UnsupportedEncodingException {
 if (encoding == null) {
-return new InputStreamReader(new FileInputStream(file), "UTF-8");
+return new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8);
 } else {
 return new InputStreamReader(new FileInputStream(file), encoding);
 }
@@ -28,6 +28,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.io.*;
+import java.nio.charset.StandardCharsets;

 /**
 * <p> Writes documents to SOLR. </p>

@@ -147,7 +148,7 @@ public class SolrWriter extends DIHWriterBase implements DIHWriter {

 }
 }
-return new String(baos.toByteArray(), "UTF-8");
+return new String(baos.toByteArray(), StandardCharsets.UTF_8);
 }

 static String getDocCount() {
@@ -24,6 +24,7 @@ import java.io.InputStreamReader;
 import java.io.Reader;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

@@ -140,7 +141,7 @@ public class URLDataSource extends DataSource<Reader> {

 public static final String BASE_URL = "baseUrl";

-public static final String UTF_8 = "UTF-8";
+public static final String UTF_8 = StandardCharsets.UTF_8.name();

 public static final String CONNECTION_TIMEOUT_FIELD_NAME = "connectionTimeout";
@@ -4,6 +4,7 @@ import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVE
 import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;

 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.util.List;

 /*

@@ -90,7 +90,7 @@ public class ZKPropertiesWriter extends SimplePropertiesWriter {
 try {
 byte[] data = zkClient.getData(path, null, null, false);
 if (data != null) {
-props.load(new StringReader(new String(data, "UTF-8")));
+props.load(new StringReader(new String(data, StandardCharsets.UTF_8)));
 }
 } catch (Exception e) {
 log.warn(
@@ -20,6 +20,7 @@ import org.junit.Before;
 import org.junit.Test;

 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.*;

@@ -30,7 +31,7 @@ import java.util.*;
 * @since solr 1.3
 */
 public class TestBuiltInEvaluators extends AbstractDataImportHandlerTestCase {
-private static final String ENCODING = "UTF-8";
+private static final String ENCODING = StandardCharsets.UTF_8.name();

 VariableResolver resolver;
@@ -19,13 +19,13 @@ package org.apache.solr.handler.dataimport;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;

 import org.apache.solr.request.LocalSolrQueryRequest;

 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.io.File;
+import java.nio.charset.StandardCharsets;

 /**
 * <p>

@@ -247,14 +247,14 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {

 Map<String, String> params = createMap("baseDir", tmpdir.getAbsolutePath());

-createFile(tmpdir, "a.xml", "a.xml".getBytes("UTF-8"), true);
-createFile(tmpdir, "b.xml", "b.xml".getBytes("UTF-8"), true);
-createFile(tmpdir, "c.props", "c.props".getBytes("UTF-8"), true);
+createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true);
+createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true);
+createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true);
 runFullImport(dataConfigFileList, params);
 assertQ(req("*:*"), "//*[@numFound='3']");

 // Add a new file after a full index is done
-createFile(tmpdir, "t.xml", "t.xml".getBytes("UTF-8"), false);
+createFile(tmpdir, "t.xml", "t.xml".getBytes(StandardCharsets.UTF_8), false);
 runFullImport(dataConfigFileList, params);
 // we should find only 1 because by default clean=true is passed
 // and this particular import should find only one file t.xml
@@ -21,6 +21,7 @@ import org.junit.Test;

 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.*;

@@ -41,9 +42,9 @@ public class TestFileListEntityProcessor extends AbstractDataImportHandlerTestCa
 tmpdir.delete();
 tmpdir.mkdir();
 tmpdir.deleteOnExit();
-createFile(tmpdir, "a.xml", "a.xml".getBytes("UTF-8"), false);
-createFile(tmpdir, "b.xml", "b.xml".getBytes("UTF-8"), false);
-createFile(tmpdir, "c.props", "c.props".getBytes("UTF-8"), false);
+createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), false);
+createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), false);
+createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), false);
 Map attrs = createMap(
 FileListEntityProcessor.FILE_NAME, "xml$",
 FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath());

@@ -69,19 +70,19 @@ public class TestFileListEntityProcessor extends AbstractDataImportHandlerTestCa
 tmpdir.deleteOnExit();
 long minLength = Long.MAX_VALUE;
 String smallestFile = "";
-byte[] content = "abcdefgij".getBytes("UTF-8");
+byte[] content = "abcdefgij".getBytes(StandardCharsets.UTF_8);
 createFile(tmpdir, "a.xml", content, false);
 if (minLength > content.length) {
 minLength = content.length;
 smallestFile = "a.xml";
 }
-content = "abcdefgij".getBytes("UTF-8");
+content = "abcdefgij".getBytes(StandardCharsets.UTF_8);
 createFile(tmpdir, "b.xml", content, false);
 if (minLength > content.length) {
 minLength = content.length;
 smallestFile = "b.xml";
 }
-content = "abc".getBytes("UTF-8");
+content = "abc".getBytes(StandardCharsets.UTF_8);
 createFile(tmpdir, "c.props", content, false);
 if (minLength > content.length) {
 minLength = content.length;

@@ -137,9 +138,9 @@ public class TestFileListEntityProcessor extends AbstractDataImportHandlerTestCa
 tmpdir.delete();
 tmpdir.mkdir();
 tmpdir.deleteOnExit();
-createFile(tmpdir, "a.xml", "a.xml".getBytes("UTF-8"), true);
-createFile(tmpdir, "b.xml", "b.xml".getBytes("UTF-8"), true);
-createFile(tmpdir, "c.props", "c.props".getBytes("UTF-8"), true);
+createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true);
+createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true);
+createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true);
 Map attrs = createMap(
 FileListEntityProcessor.FILE_NAME, "xml$",
 FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(),

@@ -161,7 +162,7 @@ public class TestFileListEntityProcessor extends AbstractDataImportHandlerTestCa
 VariableResolver resolver = new VariableResolver();
 String lastMod = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ROOT).format(new Date(System.currentTimeMillis() - 50000));
 resolver.addNamespace("a", createMap("x", lastMod));
-createFile(tmpdir, "t.xml", "t.xml".getBytes("UTF-8"), false);
+createFile(tmpdir, "t.xml", "t.xml".getBytes(StandardCharsets.UTF_8), false);
 fList = getFiles(resolver, attrs);
 assertEquals(1, fList.size());
 assertEquals("File name must be t.xml", new File(tmpdir, "t.xml").getAbsolutePath(), fList.get(0));

@@ -176,9 +177,9 @@ public class TestFileListEntityProcessor extends AbstractDataImportHandlerTestCa
 File childdir = new File(tmpdir + "/child" );
 childdir.mkdirs();
 childdir.deleteOnExit();
-createFile(childdir, "a.xml", "a.xml".getBytes("UTF-8"), true);
-createFile(childdir, "b.xml", "b.xml".getBytes("UTF-8"), true);
-createFile(childdir, "c.props", "c.props".getBytes("UTF-8"), true);
+createFile(childdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true);
+createFile(childdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true);
+createFile(childdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true);
 Map attrs = createMap(
 FileListEntityProcessor.FILE_NAME, "^.*\\.xml$",
 FileListEntityProcessor.BASE_DIR, childdir.getAbsolutePath(),
@@ -1,6 +1,7 @@
 package org.apache.solr.handler.dataimport;

 import java.io.File;
+import java.nio.charset.StandardCharsets;

 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.junit.BeforeClass;

@@ -33,9 +34,9 @@ public class TestFileListWithLineEntityProcessor extends AbstractDataImportHandl
 tmpdir.delete();
 tmpdir.mkdir();
 tmpdir.deleteOnExit();
-createFile(tmpdir, "a.txt", "a line one\na line two\na line three".getBytes("UTF-8"), false);
-createFile(tmpdir, "b.txt", "b line one\nb line two".getBytes("UTF-8"), false);
-createFile(tmpdir, "c.txt", "c line one\nc line two\nc line three\nc line four".getBytes("UTF-8"), false);
+createFile(tmpdir, "a.txt", "a line one\na line two\na line three".getBytes(StandardCharsets.UTF_8), false);
+createFile(tmpdir, "b.txt", "b line one\nb line two".getBytes(StandardCharsets.UTF_8), false);
+createFile(tmpdir, "c.txt", "c line one\nc line two\nc line three\nc line four".getBytes(StandardCharsets.UTF_8), false);

 String config = generateConfig(tmpdir);
 LocalSolrQueryRequest request = lrf.makeRequest(
@@ -21,6 +21,7 @@ import org.junit.Test;
 import java.io.File;
 import java.io.Reader;
 import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

@@ -46,7 +47,7 @@ public class TestXPathEntityProcessor extends AbstractDataImportHandlerTestCase
 tmpdir.delete();
 tmpdir.mkdir();
 tmpdir.deleteOnExit();
-createFile(tmpdir, "x.xsl", xsl.getBytes("UTF-8"), false);
+createFile(tmpdir, "x.xsl", xsl.getBytes(StandardCharsets.UTF_8), false);
 Map entityAttrs = createMap("name", "e", "url", "cd.xml",
 XPathEntityProcessor.FOR_EACH, "/catalog/cd");
 List fields = new ArrayList();

@@ -337,7 +338,7 @@ public class TestXPathEntityProcessor extends AbstractDataImportHandlerTestCase
 tmpdir.delete();
 tmpdir.mkdir();
 tmpdir.deleteOnExit();
-AbstractDataImportHandlerTestCase.createFile(tmpdir, "x.xsl", xsl.getBytes("UTF-8"),
+AbstractDataImportHandlerTestCase.createFile(tmpdir, "x.xsl", xsl.getBytes(StandardCharsets.UTF_8),
 false);
 Map entityAttrs = createMap("name", "e",
 XPathEntityProcessor.USE_SOLR_ADD_SCHEMA, "true", "xsl", ""
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;

@@ -126,10 +127,9 @@ public class LangDetectLanguageIdentifierUpdateProcessorFactory extends
 }
 loaded = true;
 List<String> profileData = new ArrayList<>();
-Charset encoding = Charset.forName("UTF-8");
 for (String language : languages) {
 InputStream stream = LangDetectLanguageIdentifierUpdateProcessor.class.getResourceAsStream("langdetect-profiles/" + language);
-BufferedReader reader = new BufferedReader(new InputStreamReader(stream, encoding));
+BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
 profileData.add(new String(IOUtils.toCharArray(reader)));
 reader.close();
 }
@@ -31,6 +31,7 @@ import java.io.Writer;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLClassLoader;
+import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;
 import java.util.ArrayList;
 import java.util.Arrays;

@@ -924,7 +925,7 @@ public class MapReduceIndexerTool extends Configured implements Tool {
 FileSystem fs = fullInputList.getFileSystem(conf);
 FSDataOutputStream out = fs.create(fullInputList);
 try {
-Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
+Writer writer = new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));

 for (Path inputFile : inputFiles) {
 FileSystem inputFileFs = inputFile.getFileSystem(conf);

@@ -949,7 +950,7 @@ public class MapReduceIndexerTool extends Configured implements Tool {
 in = inputList.getFileSystem(conf).open(inputList);
 }
 try {
-BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
+BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
 String line;
 while ((line = reader.readLine()) != null) {
 writer.write(line + "\n");

@@ -988,7 +989,7 @@ public class MapReduceIndexerTool extends Configured implements Tool {

 private void randomizeFewInputFiles(FileSystem fs, Path outputStep2Dir, Path fullInputList) throws IOException {
 List<String> lines = new ArrayList();
-BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(fullInputList), "UTF-8"));
+BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(fullInputList), StandardCharsets.UTF_8));
 try {
 String line;
 while ((line = reader.readLine()) != null) {

@@ -1001,7 +1002,7 @@ public class MapReduceIndexerTool extends Configured implements Tool {
 Collections.shuffle(lines, new Random(421439783L)); // constant seed for reproducability

 FSDataOutputStream out = fs.create(new Path(outputStep2Dir, FULL_INPUT_LIST));
-Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
+Writer writer = new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
 try {
 for (String line : lines) {
 writer.write(line + "\n");

@@ -1135,7 +1136,7 @@ public class MapReduceIndexerTool extends Configured implements Tool {
 * turnaround during trial & debug sessions
 */
 private void dryRun(MorphlineMapRunner runner, FileSystem fs, Path fullInputList) throws IOException {
-BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(fullInputList), "UTF-8"));
+BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(fullInputList), StandardCharsets.UTF_8));
 try {
 String line;
 while ((line = reader.readLine()) != null) {

@@ -1154,7 +1155,7 @@ public class MapReduceIndexerTool extends Configured implements Tool {
 int numFiles = 0;
 FSDataOutputStream out = fs.create(fullInputList);
 try {
-Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
+Writer writer = new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
 for (FileStatus stat : dirs) {
 LOG.debug("Adding path {}", stat.getPath());
 Path dir = new Path(stat.getPath(), "data/index");
@@ -22,6 +22,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Locale;

@@ -242,7 +243,7 @@ public class SolrOutputFormat<K, V> extends FileOutputFormat<K, V> {

 ZipEntry ze = new ZipEntry("solr.xml");
 zos.putNextEntry(ze);
-zos.write("<cores><core name=\"collection1\" instanceDir=\".\"/></cores>".getBytes("UTF-8"));
+zos.write("<cores><core name=\"collection1\" instanceDir=\".\"/></cores>".getBytes(StandardCharsets.UTF_8));
 zos.flush();
 zos.closeEntry();
 zos.close();
@@ -24,6 +24,7 @@ import java.io.PrintWriter;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;

 import net.sourceforge.argparse4j.ArgumentParsers;
 import net.sourceforge.argparse4j.helper.ASCIITextWidthCounter;

@@ -42,7 +43,7 @@ class ToolRunnerHelpFormatter {
 String msg;
 try {
 ToolRunner.printGenericCommandUsage(new PrintStream(bout, true, "UTF-8"));
-msg = new String(bout.toByteArray(), "UTF-8");
+msg = new String(bout.toByteArray(), StandardCharsets.UTF_8);
 } catch (UnsupportedEncodingException e) {
 throw new RuntimeException(e); // unreachable
 }
@@ -22,6 +22,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;

 public class UnbufferedDataInputInputStream extends org.apache.solr.common.util.DataInputInputStream {
 private final DataInputStream in;

@@ -97,7 +98,7 @@ public class UnbufferedDataInputInputStream extends org.apache.solr.common.util.

 @Override
 public String readLine() throws IOException {
-BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
+BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
 return reader.readLine();
 }
@@ -20,6 +20,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;

@@ -191,7 +192,7 @@ public class MapReduceIndexerToolArgumentParserTest extends SolrTestCaseJ4 {
 public void testArgsParserHelp() throws UnsupportedEncodingException {
 String[] args = new String[] { "--help" };
 assertEquals(new Integer(0), parser.parseArgs(args, conf, opts));
-String helpText = new String(bout.toByteArray(), "UTF-8");
+String helpText = new String(bout.toByteArray(), StandardCharsets.UTF_8);
 assertTrue(helpText.contains("MapReduce batch job driver that "));
 assertTrue(helpText.contains("bin/hadoop command"));
 assertEquals(0, berr.toByteArray().length);

@@ -458,9 +459,9 @@ public class MapReduceIndexerToolArgumentParserTest extends SolrTestCaseJ4 {

 private void assertArgumentParserException(String[] args) throws UnsupportedEncodingException {
 assertEquals("should have returned fail code", new Integer(1), parser.parseArgs(args, conf, opts));
-assertEquals("no sys out expected:" + new String(bout.toByteArray(), "UTF-8"), 0, bout.toByteArray().length);
+assertEquals("no sys out expected:" + new String(bout.toByteArray(), StandardCharsets.UTF_8), 0, bout.toByteArray().length);
 String usageText;
-usageText = new String(berr.toByteArray(), "UTF-8");
+usageText = new String(berr.toByteArray(), StandardCharsets.UTF_8);

 assertTrue("should start with usage msg \"usage: hadoop \":" + usageText, usageText.startsWith("usage: hadoop "));
 }
@@ -22,6 +22,7 @@ import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.lang.reflect.Array;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;

 import org.apache.commons.io.FileUtils;

@@ -307,7 +308,7 @@ public class MorphlineBasicMiniMRTest extends SolrTestCaseJ4 {
 assertTrue(fs.mkdirs(inDir));
 Path INPATH = new Path(inDir, "input.txt");
 OutputStream os = fs.create(INPATH);
-Writer wr = new OutputStreamWriter(os, "UTF-8");
+Writer wr = new OutputStreamWriter(os, StandardCharsets.UTF_8);
 wr.write(DATADIR + "/" + inputAvroFile);
 wr.close();
@@ -24,6 +24,7 @@ import java.io.UnsupportedEncodingException;
 import java.io.Writer;
 import java.lang.reflect.Array;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;

@@ -677,7 +678,7 @@ public class MorphlineGoLiveMiniMRTest extends AbstractFullDistribZkTestBase {
 Path dataDir, String localFile) throws IOException, UnsupportedEncodingException {
 Path INPATH = new Path(inDir, "input.txt");
 OutputStream os = fs.create(INPATH);
-Writer wr = new OutputStreamWriter(os, "UTF-8");
+Writer wr = new OutputStreamWriter(os, StandardCharsets.UTF_8);
 wr.write(DATADIR + File.separator + localFile);
 wr.close();
@@ -25,7 +25,7 @@ import org.apache.commons.collections.ExtendedProperties;

 import java.io.ByteArrayInputStream;
 import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;

@@ -57,11 +57,7 @@ public class SolrParamResourceLoader extends ResourceLoader {
 @Override
 public InputStream getResourceStream(String s) throws ResourceNotFoundException {
 String template = templates.get(s);
-try {
-return template == null ? null : new ByteArrayInputStream(template.getBytes("UTF-8"));
-} catch (UnsupportedEncodingException e) {
-throw new RuntimeException(e); // may not happen
-}
+return template == null ? null : new ByteArrayInputStream(template.getBytes(StandardCharsets.UTF_8));
 }

 @Override
@@ -1626,7 +1626,7 @@ public final class ZkController {
 return hostName + ':' + hostPort + '_' +
 URLEncoder.encode(trimLeadingAndTrailingSlashes(hostContext), "UTF-8");
 } catch (UnsupportedEncodingException e) {
-throw new IllegalStateException("JVM Does not seem to support UTF-8", e);
+throw new Error("JVM Does not seem to support UTF-8", e);
 }
 }
@@ -42,6 +42,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;

@@ -82,14 +83,12 @@ public abstract class ConfigSolr {

 public static ConfigSolr fromInputStream(SolrResourceLoader loader, InputStream is) {
 try {
-ByteArrayOutputStream baos = new ByteArrayOutputStream();
-ByteStreams.copy(is, baos);
-String originalXml = IOUtils.toString(new ByteArrayInputStream(baos.toByteArray()), "UTF-8");
-ByteArrayInputStream dup = new ByteArrayInputStream(baos.toByteArray());
+byte[] buf = IOUtils.toByteArray(is);
+String originalXml = new String(buf, StandardCharsets.UTF_8);
+ByteArrayInputStream dup = new ByteArrayInputStream(buf);
 Config config = new Config(loader, null, new InputSource(dup), null, false);
 return fromConfig(config, originalXml);
-}
-catch (Exception e) {
+} catch (Exception e) {
 throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
 }
 }
@@ -43,10 +43,7 @@ import org.xml.sax.InputSource;
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.IOException;
-import java.io.InputStreamReader;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Locale;
 import java.util.Set;

 /**

@@ -152,7 +149,7 @@ public class EditFileRequestHandler extends RequestHandlerBase {
 return; // Error already in rsp.
 }

-data = IOUtils.toByteArray(new InputStreamReader(stream.getStream(), "UTF-8"), "UTF-8");
+data = IOUtils.toByteArray(stream.getStream());

 // If it's "solrconfig.xml", try parsing it as that object. Otherwise, if it ends in '.xml',
 // see if it at least parses.
@@ -162,7 +162,7 @@ public abstract class BaseSolrResource extends ServerResource {
 binWriter.write(outputStream, solrRequest, solrResponse);
 } else {
 String charset = ContentStreamBase.getCharsetFromContentType(contentType);
-Writer out = (charset == null || charset.equalsIgnoreCase("UTF-8"))
+Writer out = (charset == null)
 ? new OutputStreamWriter(outputStream, UTF8)
 : new OutputStreamWriter(outputStream, charset);
 out = new FastWriter(out);
@@ -767,7 +767,7 @@ public class SolrDispatchFilter implements Filter
 binWriter.write(response.getOutputStream(), solrReq, solrRsp);
 } else {
 String charset = ContentStreamBase.getCharsetFromContentType(ct);
-Writer out = (charset == null || charset.equalsIgnoreCase("UTF-8"))
+Writer out = (charset == null)
 ? new OutputStreamWriter(response.getOutputStream(), UTF8)
 : new OutputStreamWriter(response.getOutputStream(), charset);
 out = new FastWriter(out);
@@ -21,18 +21,18 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
-import org.apache.commons.io.IOUtils;

+import org.apache.commons.io.IOUtils;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.update.AddUpdateCommand;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -121,7 +121,7 @@ public class RegexpBoostProcessor extends UpdateRequestProcessor {
 private List<BoostEntry> initBoostEntries(InputStream is) throws IOException {
 List<BoostEntry> newBoostEntries = new ArrayList<>();

-BufferedReader reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));
+BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
 try {
 String line = null;
 while ((line = reader.readLine()) != null) {
@@ -18,37 +18,37 @@ package org.apache.solr.util;
 */

 import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileFilter;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.ByteArrayInputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
-import java.io.UnsupportedEncodingException;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
-import java.util.HashSet;
 import java.util.TimeZone;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.Inflater;
 import java.util.zip.InflaterInputStream;
-import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.ProtocolException;
-import java.net.URL;
-import java.net.URLEncoder;

 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;

@@ -893,13 +893,7 @@ public class SimplePostTool {
 * @return the input stream
 */
 public static InputStream stringToStream(String s) {
-InputStream is = null;
-try {
-is = new ByteArrayInputStream(s.getBytes("UTF-8"));
-} catch (UnsupportedEncodingException e) {
-fatal("Shouldn't happen: UTF-8 not supported?!?!?!");
-}
-return is;
+return new ByteArrayInputStream(s.getBytes(StandardCharsets.UTF_8));
 }

 /**

@@ -961,10 +955,9 @@ public class SimplePostTool {
 /**
 * Takes a string as input and returns a DOM
 */
-public static Document makeDom(String in, String inputEncoding) throws SAXException, IOException,
+public static Document makeDom(byte[] in) throws SAXException, IOException,
 ParserConfigurationException {
-InputStream is = new ByteArrayInputStream(in
-.getBytes(inputEncoding));
+InputStream is = new ByteArrayInputStream(in);
 Document dom = DocumentBuilderFactory.newInstance()
 .newDocumentBuilder().parse(is);
 return dom;

@@ -1105,7 +1098,7 @@ public class SimplePostTool {
 */
 protected List<String> parseRobotsTxt(InputStream is) throws IOException {
 List<String> disallows = new ArrayList<>();
-BufferedReader r = new BufferedReader(new InputStreamReader(is, "UTF-8"));
+BufferedReader r = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
 String l;
 while((l = r.readLine()) != null) {
 String[] arr = l.split("#");

@@ -1137,10 +1130,9 @@ public class SimplePostTool {
 URL extractUrl = new URL(appendParam(postUrl.toString(), "extractOnly=true"));
 boolean success = postData(is, null, os, type, extractUrl);
 if(success) {
-String rawXml = os.toString("UTF-8");
-Document d = makeDom(rawXml, "UTF-8");
+Document d = makeDom(os.toByteArray());
 String innerXml = getXP(d, "/response/str/text()[1]", false);
-d = makeDom(innerXml, "UTF-8");
+d = makeDom(innerXml.getBytes(StandardCharsets.UTF_8));
 NodeList links = getNodesFromXP(d, "/html/body//a/@href");
 for(int i = 0; i < links.getLength(); i++) {
 String link = links.item(i).getTextContent();
@@ -20,6 +20,7 @@ package org.apache.solr;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;

@@ -468,7 +469,7 @@ public class BasicFunctionalityTest extends SolrTestCaseJ4 {

 DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
 builder.parse(new ByteArrayInputStream
-(writer.toString().getBytes("UTF-8")));
+(writer.toString().getBytes(StandardCharsets.UTF_8)));
 req.close();
 }
@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Set;

@@ -62,7 +63,7 @@ public class LegacyHTMLStripCharFilterTest extends BaseTokenStreamTestCase {
 //Some sanity checks, but not a full-fledged check
 public void testHTML() throws Exception {
 InputStream stream = getClass().getResourceAsStream("htmlStripReaderTest.html");
-LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new InputStreamReader(stream, "UTF-8"));
+LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new InputStreamReader(stream, StandardCharsets.UTF_8));
 StringBuilder builder = new StringBuilder();
 int ch = -1;
 while ((ch = reader.read()) != -1){
@@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;

@@ -82,7 +83,7 @@ public class AbstractAnalyticsStatsTest extends SolrTestCaseJ4 {
 DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
 factory.setNamespaceAware(true); // never forget this!
 DocumentBuilder builder = factory.newDocumentBuilder();
-doc = builder.parse(new InputSource(new ByteArrayInputStream(response.getBytes("UTF-8"))));
+doc = builder.parse(new InputSource(new ByteArrayInputStream(response.getBytes(StandardCharsets.UTF_8))));
 xPathFact = XPathFactory.newInstance();
 rawResponse = response;
 }
@@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;

@@ -65,7 +66,7 @@ public class AbstractAnalyticsFacetTest extends SolrTestCaseJ4 {
 DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
 factory.setNamespaceAware(true); // never forget this!
 DocumentBuilder builder = factory.newDocumentBuilder();
-doc = builder.parse(new InputSource(new ByteArrayInputStream(response.getBytes("UTF-8"))));
+doc = builder.parse(new InputSource(new ByteArrayInputStream(response.getBytes(StandardCharsets.UTF_8))));
 xPathFact = XPathFactory.newInstance();
 rawResponse = response;
 }
@@ -26,6 +26,7 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;

 import java.io.File;
+import java.nio.charset.StandardCharsets;

 public class TestModifyConfFiles extends AbstractFullDistribZkTestBase {

@@ -93,7 +94,7 @@ public class TestModifyConfFiles extends AbstractFullDistribZkTestBase {
 client.request(request);

 SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient();
-String contents = new String(zkClient.getData("/configs/conf1/schema.xml", null, null, true), "UTF-8");
+String contents = new String(zkClient.getData("/configs/conf1/schema.xml", null, null, true), StandardCharsets.UTF_8);

 assertTrue("Schema contents should have changed!", contents.contains("<schema name=\"tiny\" version=\"1.1\">"));

@@ -107,7 +108,7 @@ public class TestModifyConfFiles extends AbstractFullDistribZkTestBase {

 client.request(request);

-contents = new String(zkClient.getData("/configs/conf1/velocity/test.vm", null, null, true), "UTF-8");
+contents = new String(zkClient.getData("/configs/conf1/velocity/test.vm", null, null, true), StandardCharsets.UTF_8);
 assertTrue("Should have found new content in a velocity/test.vm.",
 contents.indexOf("Some bogus stuff for a test.") != -1);
@@ -21,6 +21,7 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.List;

@@ -156,7 +157,7 @@ public class ZkCLITest extends SolrTestCaseJ4 {

 zkClient.getData("/data.txt", null, null, true);

-assertArrayEquals(zkClient.getData("/data.txt", null, null, true), data.getBytes("UTF-8"));
+assertArrayEquals(zkClient.getData("/data.txt", null, null, true), data.getBytes(StandardCharsets.UTF_8));
 }

 @Test

@@ -166,12 +167,12 @@ public class ZkCLITest extends SolrTestCaseJ4 {
 "putfile", "/solr.xml", SOLR_HOME + File.separator + "solr-stress-new.xml"};
 ZkCLI.main(args);

-String fromZk = new String(zkClient.getData("/solr.xml", null, null, true), "UTF-8");
+String fromZk = new String(zkClient.getData("/solr.xml", null, null, true), StandardCharsets.UTF_8);
 File locFile = new File(SOLR_HOME + File.separator + "solr-stress-new.xml");
 InputStream is = new FileInputStream(locFile);
 String fromLoc;
 try {
-fromLoc = new String(IOUtils.toByteArray(is), "UTF-8");
+fromLoc = new String(IOUtils.toByteArray(is), StandardCharsets.UTF_8);
 } finally {
 IOUtils.closeQuietly(is);
 }

@@ -267,7 +268,7 @@ public class ZkCLITest extends SolrTestCaseJ4 {
 @Test
 public void testGet() throws Exception {
 String getNode = "/getNode";
-byte [] data = new String("getNode-data").getBytes("UTF-8");
+byte [] data = new String("getNode-data").getBytes(StandardCharsets.UTF_8);
 this.zkClient.create(getNode, data, CreateMode.PERSISTENT, true);
 String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd",
 "get", getNode};

@@ -277,7 +278,7 @@ public class ZkCLITest extends SolrTestCaseJ4 {
 @Test
 public void testGetFile() throws Exception {
 String getNode = "/getFileNode";
-byte [] data = new String("getFileNode-data").getBytes("UTF-8");
+byte [] data = new String("getFileNode-data").getBytes(StandardCharsets.UTF_8);
 this.zkClient.create(getNode, data, CreateMode.PERSISTENT, true);

 File file = new File(dataDir,
@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;

@@ -72,21 +73,21 @@ public class TestSolrXMLSerializer extends SolrTestCaseJ4 {
 sharedLibVal, adminPathKey, adminPathVal, shareSchemaKey,
 shareSchemaVal, instanceDirKey, instanceDirVal);

-Writer w = new StringWriter();
+StringWriter w = new StringWriter();
 try {
 serializer.persist(w, solrXMLDef);
 } finally {
 w.close();
 }

-assertResults(((StringWriter) w).getBuffer().toString().getBytes("UTF-8"));
+assertResults(w.toString().getBytes(StandardCharsets.UTF_8));

 // again with default file
 File tmpFile = TestUtil.createTempFile("solr.xml", null, dataDir);

 serializer.persistFile(tmpFile, solrXMLDef);

-assertResults(FileUtils.readFileToString(tmpFile, "UTF-8").getBytes("UTF-8"));
+assertResults(FileUtils.readFileToByteArray(tmpFile));
 tmpFile.delete();
 }
@@ -29,6 +29,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;

 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.ArrayList;

@@ -41,7 +42,6 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
 }

 String filename;
-String def_charset = "UTF-8";
 File file;

 @Override

@@ -66,12 +66,8 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
 }

 void makeFile(String contents) {
-makeFile(contents,def_charset);
-}
-
-void makeFile(String contents, String charset) {
 try {
-Writer out = new OutputStreamWriter(new FileOutputStream(filename), charset);
+Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8);
 out.write(contents);
 out.close();
 } catch (Exception e) {
@@ -19,6 +19,7 @@
 package org.apache.solr.internal.csv.writer;

 import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;

 import junit.framework.TestCase;

@@ -57,7 +58,7 @@ public class CSVConfigGuesserTest extends TestCase {
 StringBuilder sb = new StringBuilder();
 sb.append("1234;abcd;1234\n");
 sb.append("abcd;1234;abcd");
-ByteArrayInputStream in = new ByteArrayInputStream(sb.toString().getBytes("UTF-8"));
+ByteArrayInputStream in = new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8));
 CSVConfigGuesser guesser = new CSVConfigGuesser(in);
 CSVConfig guessed = guesser.guess();
 assertEquals(expected.isFixedWidth(), guessed.isFixedWidth());

@@ -80,7 +81,7 @@ public class CSVConfigGuesserTest extends TestCase {
 StringBuilder sb = new StringBuilder();
 sb.append("1,2,3,4\n");
 sb.append("abcd,1234,abcd,1234");
-ByteArrayInputStream in = new ByteArrayInputStream(sb.toString().getBytes("UTF-8"));
+ByteArrayInputStream in = new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8));
 CSVConfigGuesser guesser = new CSVConfigGuesser(in);
 CSVConfig guessed = guesser.guess();
 assertEquals(expected.isFixedWidth(), guessed.isFixedWidth());
@@ -19,6 +19,7 @@ package org.apache.solr.request;

 import java.io.IOException;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Set;

@@ -92,7 +93,7 @@ public class JSONWriterTest extends SolrTestCaseJ4 {

 rsp.add("byte", Byte.valueOf((byte)-3));
 rsp.add("short", Short.valueOf((short)-4));
-rsp.add("bytes", "abc".getBytes("UTF-8"));
+rsp.add("bytes", "abc".getBytes(StandardCharsets.UTF_8));

 w.write(buf, req, rsp);
 jsonEq("{\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]],\"byte\":-3,\"short\":-4,\"bytes\":\"YWJj\"}", buf.toString());
@@ -42,6 +42,7 @@ import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
 import java.net.URL;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;

 /**
 * See SOLR-2854.

@@ -97,7 +98,7 @@ public class TestRemoteStreaming extends SolrJettyTestBase {
 InputStream inputStream = (InputStream) obj;
 try {
 StringWriter strWriter = new StringWriter();
-IOUtils.copy(new InputStreamReader(inputStream, "UTF-8"),strWriter);
+IOUtils.copy(new InputStreamReader(inputStream, StandardCharsets.UTF_8),strWriter);
 return strWriter.toString();
 } finally {
 IOUtils.closeQuietly(inputStream);
@@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;

 import org.apache.solr.client.solrj.ResponseParser;

@@ -119,7 +120,7 @@ public class TestWriterPerf extends AbstractSolrTestCase {
 out = new ByteArrayOutputStream();
 // to be fair, from my previous tests, much of the performance will be sucked up
 // by java's UTF-8 encoding/decoding, not the actual writing
-Writer writer = new OutputStreamWriter(out, "UTF-8");
+Writer writer = new OutputStreamWriter(out, StandardCharsets.UTF_8);
 w.write(writer, req, rsp);
 writer.close();
 }
@@ -29,6 +29,7 @@ import org.apache.zookeeper.KeeperException;
 import org.junit.BeforeClass;

 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;

 public class TestCloudManagedSchema extends AbstractFullDistribZkTestBase {

@@ -89,14 +90,14 @@ public class TestCloudManagedSchema extends AbstractFullDistribZkTestBase {
 private String getFileContentFromZooKeeper(SolrZkClient zkClient, String fileName)
 throws IOException, SolrServerException, KeeperException, InterruptedException {

-return (new String(zkClient.getData(fileName, null, null, true), "UTF-8"));
+return (new String(zkClient.getData(fileName, null, null, true), StandardCharsets.UTF_8));

 }
 protected final void assertFileNotInZooKeeper(SolrZkClient zkClient, String parent, String fileName) throws Exception {
 List<String> kids = zkClient.getChildren(parent, null, true);
 for (String kid : kids) {
 if (kid.equalsIgnoreCase(fileName)) {
-String rawContent = new String(zkClient.getData(fileName, null, null, true), "UTF-8");
+String rawContent = new String(zkClient.getData(fileName, null, null, true), StandardCharsets.UTF_8);
 fail("File '" + fileName + "' was unexpectedly found in ZooKeeper. Content starts with '"
 + rawContent.substring(0, 100) + " [...]'");
 }
@@ -33,6 +33,7 @@ import org.junit.Test;

 import java.io.File;
 import java.io.RandomAccessFile;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Deque;

@@ -1030,9 +1031,9 @@ public class TestRecovery extends SolrTestCaseJ4 {
 raf.close();

 // Now make a newer log file with just the IDs changed. NOTE: this may not work if log format changes too much!
-findReplace("AAAAAA".getBytes("UTF-8"), "aaaaaa".getBytes("UTF-8"), content);
-findReplace("BBBBBB".getBytes("UTF-8"), "bbbbbb".getBytes("UTF-8"), content);
-findReplace("CCCCCC".getBytes("UTF-8"), "cccccc".getBytes("UTF-8"), content);
+findReplace("AAAAAA".getBytes(StandardCharsets.UTF_8), "aaaaaa".getBytes(StandardCharsets.UTF_8), content);
+findReplace("BBBBBB".getBytes(StandardCharsets.UTF_8), "bbbbbb".getBytes(StandardCharsets.UTF_8), content);
+findReplace("CCCCCC".getBytes(StandardCharsets.UTF_8), "cccccc".getBytes(StandardCharsets.UTF_8), content);

 // WARNING... assumes format of .00000n where n is less than 9
 long logNumber = Long.parseLong(fname.substring(fname.lastIndexOf(".") + 1));
@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Deque;

@@ -1030,9 +1031,9 @@ public class TestRecoveryHdfs extends SolrTestCaseJ4 {
 dis.close();

 // Now make a newer log file with just the IDs changed. NOTE: this may not work if log format changes too much!
-findReplace("AAAAAA".getBytes("UTF-8"), "aaaaaa".getBytes("UTF-8"), content);
-findReplace("BBBBBB".getBytes("UTF-8"), "bbbbbb".getBytes("UTF-8"), content);
-findReplace("CCCCCC".getBytes("UTF-8"), "cccccc".getBytes("UTF-8"), content);
+findReplace("AAAAAA".getBytes(StandardCharsets.UTF_8), "aaaaaa".getBytes(StandardCharsets.UTF_8), content);
+findReplace("BBBBBB".getBytes(StandardCharsets.UTF_8), "bbbbbb".getBytes(StandardCharsets.UTF_8), content);
+findReplace("CCCCCC".getBytes(StandardCharsets.UTF_8), "cccccc".getBytes(StandardCharsets.UTF_8), content);

 // WARNING... assumes format of .00000n where n is less than 9
 long logNumber = Long.parseLong(fname.substring(fname.lastIndexOf(".") + 1));
@ -28,9 +28,11 @@ import org.apache.solr.common.util.NamedList;
|
|||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.junit.Ignore;
|
||||
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.Writer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
@ -49,11 +51,12 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
|
|||
|
||||
String base = "external_foo_extf";
|
||||
static long start = System.currentTimeMillis();
|
||||
void makeExternalFile(String field, String contents, String charset) {
|
||||
|
||||
void makeExternalFile(String field, String contents) {
|
||||
String dir = h.getCore().getDataDir();
|
||||
String filename = dir + "/external_" + field + "." + (start++);
|
||||
try {
|
||||
Writer out = new OutputStreamWriter(new FileOutputStream(filename), charset);
|
||||
Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8);
|
||||
out.write(contents);
|
||||
out.close();
|
||||
} catch (Exception e) {
|
||||
|
@@ -219,7 +222,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     createIndex(null,ids);
 
     // Unsorted field, largest first
-    makeExternalFile(field, "54321=543210\n0=-999\n25=250","UTF-8");
+    makeExternalFile(field, "54321=543210\n0=-999\n25=250");
     // test identity (straight field value)
     singleTest(field, "\0", 54321, 543210, 0,-999, 25,250, 100, 1);
     Object orig = FileFloatSource.onlyForTesting;

@@ -229,7 +232,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     singleTest(field, "sqrt(\0)");
     assertTrue(orig == FileFloatSource.onlyForTesting);
 
-    makeExternalFile(field, "0=1","UTF-8");
+    makeExternalFile(field, "0=1");
     assertU(h.query("/reloadCache",lrf.makeRequest("","")));
     singleTest(field, "sqrt(\0)");
     assertTrue(orig != FileFloatSource.onlyForTesting);

@@ -263,7 +266,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     for (int j=0; j<len; j++) {
       sb.append("" + ids[j] + "=" + vals[j]+"\n");
     }
-    makeExternalFile(field, sb.toString(),"UTF-8");
+    makeExternalFile(field, sb.toString());
 
     // make it visible
     assertU(h.query("/reloadCache",lrf.makeRequest("","")));

@@ -294,7 +297,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     assertU(adoc("id", "992", keyField, "BBB"));
     assertU(adoc("id", "993", keyField, "CCC=CCC"));
     assertU(commit());
-    makeExternalFile(extField, "AAA=AAA=543210\nBBB=-8\nCCC=CCC=250","UTF-8");
+    makeExternalFile(extField, "AAA=AAA=543210\nBBB=-8\nCCC=CCC=250");
     singleTest(extField,"\0",991,543210,992,-8,993,250);
   }
 

@@ -306,7 +309,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     assertU(adoc("id", "992", keyField, "92"));
     assertU(adoc("id", "993", keyField, "93"));
     assertU(commit());
-    makeExternalFile(extField, "91=543210\n92=-8\n93=250\n=67","UTF-8");
+    makeExternalFile(extField, "91=543210\n92=-8\n93=250\n=67");
     singleTest(extField,"\0",991,543210,992,-8,993,250);
   }
 

@@ -624,7 +627,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     createIndex(null,ids);
 
     // Unsorted field, largest first
-    makeExternalFile(field, "54321=543210\n0=-999\n25=250","UTF-8");
+    makeExternalFile(field, "54321=543210\n0=-999\n25=250");
     // test identity (straight field value)
     singleTest(fieldAsFunc, "\0", 54321, 543210, 0,-999, 25,250, 100, 1);
     Object orig = FileFloatSource.onlyForTesting;

@@ -634,7 +637,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     singleTest(fieldAsFunc, "sqrt(\0)");
     assertTrue(orig == FileFloatSource.onlyForTesting);
 
-    makeExternalFile(field, "0=1","UTF-8");
+    makeExternalFile(field, "0=1");
     assertU(adoc("id", "10000")); // will get same reader if no index change
     assertU(commit());
     singleTest(fieldAsFunc, "sqrt(\0)");

|
@ -18,6 +18,7 @@ package org.apache.solr.servlet;
|
|||
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.apache.http.HttpResponse;
|
||||
|
@@ -49,7 +50,7 @@ public abstract class CacheHeaderTestBase extends SolrJettyTestBase {
     }
 
     URI uri = URI.create(httpserver.getBaseURL() + "/select?" +
-        URLEncodedUtils.format(qparams, "UTF-8"));
+        URLEncodedUtils.format(qparams, StandardCharsets.UTF_8));
 
     if ("GET".equals(method)) {
       m = new HttpGet(uri);

@@ -72,7 +73,7 @@ public abstract class CacheHeaderTestBase extends SolrJettyTestBase {
     }
 
     URI uri = URI.create(httpserver.getBaseURL() + "/update?" +
-        URLEncodedUtils.format(qparams, "UTF-8"));
+        URLEncodedUtils.format(qparams, StandardCharsets.UTF_8));
 
     if ("GET".equals(method)) {
       m=new HttpGet(uri);

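URLEncodedUtils.format is switched to the overload that accepts a Charset, matching the rest of the commit. A small sketch of building a query string that way (assumes the same HttpClient 4.x classes the test uses; the parameter values are made up):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.http.NameValuePair;
    import org.apache.http.client.utils.URLEncodedUtils;
    import org.apache.http.message.BasicNameValuePair;

    public class QueryStringDemo {
      public static void main(String[] args) {
        List<NameValuePair> qparams = Arrays.<NameValuePair>asList(
            new BasicNameValuePair("q", "*:*"),
            new BasicNameValuePair("wt", "xml"));
        // Charset-based overload, as in the change above, instead of the "UTF-8" name.
        String query = URLEncodedUtils.format(qparams, StandardCharsets.UTF_8);
        System.out.println("/select?" + query);
      }
    }
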
|
@ -18,6 +18,7 @@ package org.apache.solr.update;
|
|||
*/
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.Term;
|
||||
|
@ -39,6 +40,7 @@ import org.junit.Test;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
|
||||
public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
|
||||
|
@ -326,9 +328,9 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
|
|||
|
||||
private List<DocRouter.Range> getRanges(String id1, String id2) throws UnsupportedEncodingException {
|
||||
// find minHash/maxHash hash ranges
|
||||
byte[] bytes = id1.getBytes("UTF-8");
|
||||
byte[] bytes = id1.getBytes(StandardCharsets.UTF_8);
|
||||
int minHash = Hash.murmurhash3_x86_32(bytes, 0, bytes.length, 0);
|
||||
bytes = id2.getBytes("UTF-8");
|
||||
bytes = id2.getBytes(StandardCharsets.UTF_8);
|
||||
int maxHash = Hash.murmurhash3_x86_32(bytes, 0, bytes.length, 0);
|
||||
|
||||
if (minHash > maxHash) {
|
||||
|
|
|
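getRanges hashes the raw UTF-8 bytes of the two ids; with the Charset overload the method's throws UnsupportedEncodingException clause is no longer forced by the getBytes calls. A small sketch of just the hashing step (assumes Solr's org.apache.solr.common.util.Hash helper referenced above; the id string is hypothetical):

    import java.nio.charset.StandardCharsets;

    import org.apache.solr.common.util.Hash;

    public class RouteHashDemo {
      public static void main(String[] args) {
        byte[] bytes = "shard1!doc1".getBytes(StandardCharsets.UTF_8);
        // Same murmurhash3 call the test uses to derive a hash-range boundary.
        int hash = Hash.murmurhash3_x86_32(bytes, 0, bytes.length, 0);
        System.out.printf("hash=%08x%n", hash);
      }
    }
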
@@ -24,6 +24,7 @@ import java.io.InputStream;
 import java.io.UnsupportedEncodingException;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Set;

@@ -213,7 +214,7 @@ public class SimplePostToolTest extends SolrTestCaseJ4 {
     sb.append("Disallow: /disallow # Disallow this path\n");
     sb.append("Disallow: /nonexistingpath # Disallow this path\n");
     this.robotsCache.put("[ff01::114]", SimplePostTool.pageFetcher.
-        parseRobotsTxt(new ByteArrayInputStream(sb.toString().getBytes("UTF-8"))));
+        parseRobotsTxt(new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))));
   }
 
   @Override

@@ -225,11 +226,7 @@ public class SimplePostToolTest extends SolrTestCaseJ4 {
       }
       res.httpStatus = 200;
       res.contentType = "text/html";
-      try {
-        res.content = htmlMap.get(u.toString()).getBytes("UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new RuntimeException();
-      }
+      res.content = htmlMap.get(u.toString()).getBytes(StandardCharsets.UTF_8);
       return res;
     }
 

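This hunk shows the main payoff of the cleanup: because getBytes(Charset) cannot throw UnsupportedEncodingException, the surrounding try/catch disappears entirely. A self-contained before/after sketch (a hypothetical helper, not the SimplePostTool code):

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class EncodeHelper {
      // Old style: the charset name forces a catch block for an exception that never fires for UTF-8.
      static byte[] encodeOld(String html) {
        try {
          return html.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new RuntimeException(e);
        }
      }

      // New style: the Charset overload has no checked exception, so no wrapper is needed.
      static byte[] encodeNew(String html) {
        return html.getBytes(StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        System.out.println(encodeOld("<html/>").length == encodeNew("<html/>").length); // true
      }
    }
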
@@ -198,7 +198,7 @@ public class ConcurrentUpdateSolrServer extends SolrServer {
           byte[] content = String.format(Locale.ROOT,
               fmt,
               params.getBool(UpdateParams.WAIT_SEARCHER, false)
-                  + "").getBytes("UTF-8");
+                  + "").getBytes(StandardCharsets.UTF_8);
           out.write(content);
         }
       }

@@ -336,7 +336,7 @@ public class HttpSolrServer extends SolrServer {
         if (vals != null) {
           for (String v : vals) {
             if (isMultipart) {
-              parts.add(new FormBodyPart(p, new StringBody(v, Charset.forName("UTF-8"))));
+              parts.add(new FormBodyPart(p, new StringBody(v, StandardCharsets.UTF_8)));
             } else {
               postParams.add(new BasicNameValuePair(p, v));
             }

@@ -370,7 +370,7 @@ public class HttpSolrServer extends SolrServer {
         post.setEntity(entity);
       } else {
         //not using multipart
-        post.setEntity(new UrlEncodedFormEntity(postParams, "UTF-8"));
+        post.setEntity(new UrlEncodedFormEntity(postParams, StandardCharsets.UTF_8));
       }
 
       method = post;

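Both branches of the POST path now hand HttpClient a Charset instead of a charset name. A trimmed sketch of the non-multipart branch in isolation (assumes the HttpClient 4.x classes used above; the URL and parameter are made up):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.http.NameValuePair;
    import org.apache.http.client.entity.UrlEncodedFormEntity;
    import org.apache.http.client.methods.HttpPost;
    import org.apache.http.message.BasicNameValuePair;

    public class FormPostDemo {
      public static void main(String[] args) {
        List<NameValuePair> postParams = Arrays.<NameValuePair>asList(
            new BasicNameValuePair("commit", "true"));
        HttpPost post = new HttpPost("http://localhost:8983/solr/update");
        // Charset overload instead of the "UTF-8" string name, mirroring the change above.
        post.setEntity(new UrlEncodedFormEntity(postParams, StandardCharsets.UTF_8));
      }
    }
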
@@ -28,6 +28,7 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 /**
  * A RequestWriter is used to write requests to Solr.

@@ -38,7 +39,7 @@ import java.nio.charset.Charset;
  * @since solr 1.4
  */
 public class RequestWriter {
-  public static final Charset UTF_8 = Charset.forName("UTF-8");
+  public static final Charset UTF_8 = StandardCharsets.UTF_8;
 
   public Collection<ContentStream> getContentStreams(SolrRequest req) throws IOException {
     if (req instanceof UpdateRequest) {

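RequestWriter keeps its public UTF_8 constant but now aliases the JDK-provided instance instead of looking it up by name at class initialization. A minimal sketch of the difference (hypothetical class name):

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    public class CharsetConstants {
      // Old style: a runtime lookup by charset name.
      public static final Charset UTF_8_BY_NAME = Charset.forName("UTF-8");
      // New style: the JDK's guaranteed, always-available constant.
      public static final Charset UTF_8 = StandardCharsets.UTF_8;

      public static void main(String[] args) {
        // Both resolve to the same charset; the constant just avoids the string lookup.
        System.out.println(UTF_8_BY_NAME.equals(UTF_8)); // true
      }
    }
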
@@ -41,6 +41,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 /**

@@ -102,7 +103,7 @@ public class NoOpResponseParserTest extends SolrJettyTestBase {
     NoOpResponseParser parser = new NoOpResponseParser();
     try (final InputStream is = getResponse()) {
       assertNotNull(is);
-      Reader in = new InputStreamReader(is, "UTF-8");
+      Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
       NamedList<Object> response = parser.processResponse(in);
       assertNotNull(response.get("response"));
       String expectedResponse = IOUtils.toString(getResponse(), "UTF-8");

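The Reader is now built from a Charset; note the commons-io IOUtils.toString call a line below still takes a charset name, since that is a different API and is left as context. A compact, runnable sketch of the Reader pattern itself (the XML payload is made up):

    import java.io.BufferedReader;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class Utf8ReaderDemo {
      public static void main(String[] args) throws IOException {
        byte[] payload = "<response status=\"0\"/>".getBytes(StandardCharsets.UTF_8);
        try (InputStream is = new ByteArrayInputStream(payload);
             BufferedReader in = new BufferedReader(
                 new InputStreamReader(is, StandardCharsets.UTF_8))) {
          // The Charset constructor replaces new InputStreamReader(is, "UTF-8") and its checked exception.
          System.out.println(in.readLine());
        }
      }
    }
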
@@ -18,6 +18,7 @@
 package org.apache.solr.client.solrj.response;
 
 import junit.framework.Assert;
+
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.impl.XMLResponseParser;
 import org.apache.solr.common.SolrDocumentList;

@@ -29,6 +30,7 @@ import org.junit.Test;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 /**

@@ -42,7 +44,7 @@ public class QueryResponseTest extends LuceneTestCase {
     XMLResponseParser parser = new XMLResponseParser();
     InputStream is = new SolrResourceLoader(null, null).openResource("solrj/sampleDateFacetResponse.xml");
     assertNotNull(is);
-    Reader in = new InputStreamReader(is, "UTF-8");
+    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
     NamedList<Object> response = parser.processResponse(in);
     in.close();
 

@@ -66,7 +68,7 @@ public class QueryResponseTest extends LuceneTestCase {
     XMLResponseParser parser = new XMLResponseParser();
     InputStream is = new SolrResourceLoader(null, null).openResource("solrj/sampleDateFacetResponse.xml");
     assertNotNull(is);
-    Reader in = new InputStreamReader(is, "UTF-8");
+    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
     NamedList<Object> response = parser.processResponse(in);
     in.close();
 

@@ -123,7 +125,7 @@ public class QueryResponseTest extends LuceneTestCase {
     XMLResponseParser parser = new XMLResponseParser();
     InputStream is = new SolrResourceLoader(null, null).openResource("solrj/sampleGroupResponse.xml");
     assertNotNull(is);
-    Reader in = new InputStreamReader(is, "UTF-8");
+    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
     NamedList<Object> response = parser.processResponse(in);
     in.close();
 

@@ -225,7 +227,7 @@ public class QueryResponseTest extends LuceneTestCase {
     XMLResponseParser parser = new XMLResponseParser();
     InputStream is = new SolrResourceLoader(null, null).openResource("solrj/sampleSimpleGroupResponse.xml");
     assertNotNull(is);
-    Reader in = new InputStreamReader(is, "UTF-8");
+    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
     NamedList<Object> response = parser.processResponse(in);
     in.close();
 

@@ -25,6 +25,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.solr.SolrTestCaseJ4;

@@ -57,7 +58,7 @@ public class ContentStreamTest extends SolrTestCaseJ4
     InputStream s = stream.getStream();
     FileInputStream fis = new FileInputStream(file);
     InputStreamReader isr = new InputStreamReader(
-        new FileInputStream(file), "UTF-8");
+        new FileInputStream(file), StandardCharsets.UTF_8);
     Reader r = stream.getReader();
     try {
       assertEquals(file.length(), stream.getSize().intValue());

@@ -87,7 +88,7 @@ public class ContentStreamTest extends SolrTestCaseJ4
     InputStream s = stream.getStream();
     FileInputStream fis = new FileInputStream(file);
     FileInputStream fis2 = new FileInputStream(file);
-    InputStreamReader isr = new InputStreamReader(fis, "UTF-8");
+    InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8);
     Reader r = stream.getReader();
     try {
       assertTrue(IOUtils.contentEquals(fis2, s));

@@ -28,10 +28,12 @@ import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpressionException;
 import javax.xml.xpath.XPathFactory;
+
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 
 abstract public class BaseTestHarness {
   private static final ThreadLocal<DocumentBuilder> builderTL = new ThreadLocal<>();

@@ -80,7 +82,7 @@ abstract public class BaseTestHarness {
     Document document = null;
     try {
       document = getXmlDocumentBuilder().parse(new ByteArrayInputStream
-          (xml.getBytes("UTF-8")));
+          (xml.getBytes(StandardCharsets.UTF_8)));
     } catch (UnsupportedEncodingException e1) {
       throw new RuntimeException("Totally weird UTF-8 exception", e1);
     } catch (IOException e2) {

@@ -105,7 +107,7 @@ abstract public class BaseTestHarness {
     Document document = null;
     try {
       document = getXmlDocumentBuilder().parse(new ByteArrayInputStream
-          (xml.getBytes("UTF-8")));
+          (xml.getBytes(StandardCharsets.UTF_8)));
     } catch (UnsupportedEncodingException e1) {
       throw new RuntimeException("Totally weird UTF-8 exception", e1);
     } catch (IOException e2) {

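Both hunks keep the UnsupportedEncodingException catch as unchanged context because the commit touches only the minimal lines; with the Charset overload that catch now guards only the parse call itself. A standalone sketch of the same parse step (the XML string is illustrative):

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;

    import javax.xml.parsers.DocumentBuilderFactory;

    import org.w3c.dom.Document;

    public class XmlParseDemo {
      public static void main(String[] args) throws Exception {
        String xml = "<response><lst name='params'/></response>";
        // getBytes(Charset) cannot throw UnsupportedEncodingException, so no dedicated catch is needed here.
        Document document = DocumentBuilderFactory.newInstance()
            .newDocumentBuilder()
            .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
        System.out.println(document.getDocumentElement().getNodeName()); // response
      }
    }
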
@@ -18,6 +18,7 @@ package org.apache.solr.util;
 
 import java.io.IOException;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpressionException;

@@ -106,7 +107,7 @@ public class RestTestHarness extends BaseTestHarness {
   public String put(String request, String content) throws IOException {
     HttpPut httpPut = new HttpPut(getBaseURL() + request);
     httpPut.setEntity(new StringEntity(content, ContentType.create(
-        "application/json", "utf-8")));
+        "application/json", StandardCharsets.UTF_8)));
 
     return getResponse(httpPut);
   }

@@ -134,7 +135,7 @@ public class RestTestHarness extends BaseTestHarness {
   public String post(String request, String content) throws IOException {
     HttpPost httpPost = new HttpPost(getBaseURL() + request);
     httpPost.setEntity(new StringEntity(content, ContentType.create(
-        "application/json", "utf-8")));
+        "application/json", StandardCharsets.UTF_8)));
 
     return getResponse(httpPost);
   }

@@ -189,7 +190,7 @@ public class RestTestHarness extends BaseTestHarness {
     HttpEntity entity = null;
     try {
       entity = httpClient.execute(request).getEntity();
-      return EntityUtils.toString(entity, "UTF-8");
+      return EntityUtils.toString(entity, StandardCharsets.UTF_8);
     } finally {
       EntityUtils.consumeQuietly(entity);
     }

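The harness now passes Charset objects to both ContentType.create and EntityUtils.toString. A trimmed-down sketch of a PUT helper built from the same HttpClient 4.x calls shown above (the client instance, URL, and payload are hypothetical):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.http.HttpEntity;
    import org.apache.http.client.HttpClient;
    import org.apache.http.client.methods.HttpPut;
    import org.apache.http.entity.ContentType;
    import org.apache.http.entity.StringEntity;
    import org.apache.http.util.EntityUtils;

    public class JsonPutDemo {
      static String put(HttpClient httpClient, String url, String json) throws IOException {
        HttpPut httpPut = new HttpPut(url);
        // Charset-based ContentType, as in the change above.
        httpPut.setEntity(new StringEntity(json, ContentType.create("application/json", StandardCharsets.UTF_8)));
        HttpEntity entity = null;
        try {
          entity = httpClient.execute(httpPut).getEntity();
          // Charset-based decode of the response body.
          return EntityUtils.toString(entity, StandardCharsets.UTF_8);
        } finally {
          EntityUtils.consumeQuietly(entity);
        }
      }
    }
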