mirror of https://github.com/apache/lucene.git
initial commit for java 21
parent 8fa0de2743
commit 1274db1ae6
@@ -73,7 +73,7 @@ ext {
   }
 
   // Minimum Java version required to compile and run Lucene.
-  minJavaVersion = JavaVersion.VERSION_17
+  minJavaVersion = JavaVersion.VERSION_21
 
   // snapshot build marker used in scripts.
   snapshotBuild = version.contains("SNAPSHOT")
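A throwaway sketch, not taken from the commit, of what this bump means at runtime: the standard Runtime.version() API reports the feature release, and anything below 21 can no longer run this branch.

public class MinJavaCheck {
  public static void main(String[] args) {
    // Runtime.version().feature() returns the Java feature release, e.g. 21.
    int feature = Runtime.version().feature();
    if (feature < 21) {
      throw new IllegalStateException("This branch requires Java 21+, found " + feature);
    }
    System.out.println("Running on Java feature release " + feature);
  }
}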
@@ -101,6 +101,12 @@ dependencies {
   groovy "org.codehaus.groovy:groovy-all:3.0.12"
 }
 
+repositories {
+  maven {
+    url "https://repo.eclipse.org/content/repositories/eclipse-snapshots/"
+  }
+}
+
 apply from: file('buildSrc/scriptDepVersions.gradle')
 
 // Include smaller chunks configuring dedicated build areas.
@@ -24,7 +24,7 @@ ext {
     "apache-rat": "0.14",
     "asm": "9.6",
     "commons-codec": "1.13",
-    "ecj": "3.30.0",
+    "ecj": "3.36.0-SNAPSHOT",
     "flexmark": "0.61.24",
     "javacc": "7.0.12",
     "jflex": "1.8.2",
@@ -20,7 +20,7 @@ import java.util.Locale;
 
 /** A stemmer for Brazilian Portuguese words. */
 class BrazilianStemmer {
-  private static final Locale locale = new Locale("pt", "BR");
+  private static final Locale locale = Locale.of("pt", "BR");
 
   /** Changed term */
   private String TERM;
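The same one-line substitution appears throughout the rest of the commit: the java.util.Locale constructors are deprecated since Java 19 in favor of the Locale.of(...) factories, which take the same language/country/variant arguments and produce equal Locale instances. A minimal standalone sketch of the two forms:

import java.util.Locale;

public class LocaleOfSketch {
  @SuppressWarnings("deprecation") // the constructor form is kept only for comparison
  public static void main(String[] args) {
    Locale oldStyle = new Locale("pt", "BR"); // deprecated constructor
    Locale newStyle = Locale.of("pt", "BR");  // Java 19+ factory used in this commit

    System.out.println(oldStyle.equals(newStyle));         // true
    System.out.println(Locale.of("").equals(Locale.ROOT)); // empty language is the root locale
  }
}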
@@ -33,7 +33,7 @@ class GermanStemmer {
   /** Amount of characters that are removed with <code>substitute()</code> while stemming. */
   private int substCount = 0;
 
-  private static final Locale locale = new Locale("de", "DE");
+  private static final Locale locale = Locale.of("de", "DE");
 
   /**
    * Stemms the given term to an unique <code>discriminator</code>.
@@ -38,7 +38,7 @@ public class ThaiTokenizer extends SegmentingTokenizerBase {
    */
   public static final boolean DBBI_AVAILABLE;
 
-  private static final BreakIterator proto = BreakIterator.getWordInstance(new Locale("th"));
+  private static final BreakIterator proto = BreakIterator.getWordInstance(Locale.of("th"));
 
   static {
     // check that we have a working dictionary-based break iterator for thai
@@ -26,7 +26,7 @@ public class TestCollationKeyAnalyzer extends CollationTestBase {
   // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
   // RuleBasedCollator. However, the Arabic Locale seems to order the Farsi
   // characters properly.
-  private Collator collator = Collator.getInstance(new Locale("ar"));
+  private Collator collator = Collator.getInstance(Locale.of("ar"));
   private Analyzer analyzer;
 
   @Override
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.nio.charset.StandardCharsets;
@@ -111,7 +112,7 @@ public class GenerateJflexTLDMacros {
 
   public GenerateJflexTLDMacros(String tldFileURL, String jflexFile, String tldListFile)
       throws Exception {
-    this.tldFileURL = new URL(tldFileURL);
+    this.tldFileURL = URI.create(tldFileURL).toURL();
     this.jflexMacroFile = Paths.get(jflexFile);
     this.tldListFile = Paths.get(tldListFile);
   }
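The URL-related edits follow a second recurring pattern: the java.net.URL constructors are deprecated since Java 20, so a string is parsed into a URI first and converted with toURL(). A minimal sketch, using a placeholder address rather than the one the generator actually reads:

import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;

public class UrlFromUriSketch {
  public static void main(String[] args) throws MalformedURLException {
    String tldFileURL = "https://example.invalid/tlds.txt"; // placeholder input

    // Before: URL url = new URL(tldFileURL);  // deprecated since Java 20
    URL url = URI.create(tldFileURL).toURL();  // replacement used in this commit

    System.out.println(url.getHost() + url.getPath());
  }
}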
@@ -24,7 +24,7 @@ import org.apache.lucene.util.BytesRef;
 
 public class TestICUCollationKeyAnalyzer extends CollationTestBase {
 
-  private Collator collator = Collator.getInstance(new Locale("fa"));
+  private Collator collator = Collator.getInstance(Locale.of("fa"));
   private Analyzer analyzer;
 
   @Override
@@ -27,6 +27,7 @@ import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.nio.charset.StandardCharsets;
@@ -155,19 +156,19 @@ public class GenerateUTR30DataFiles {
   }
 
   private static void getNFKCDataFilesFromIcuProject(String releaseTag) throws IOException {
-    URL icuTagsURL = new URL(ICU_GIT_TAG_URL + "/");
-    URL icuReleaseTagURL = new URL(icuTagsURL, releaseTag + "/");
-    URL norm2url = new URL(icuReleaseTagURL, ICU_DATA_NORM2_PATH + "/");
+    URI icuTagsURL = URI.create(ICU_GIT_TAG_URL + "/");
+    URI icuReleaseTagURL = icuTagsURL.resolve(releaseTag + "/");
+    URI norm2url = icuReleaseTagURL.resolve(ICU_DATA_NORM2_PATH + "/");
 
     System.err.print("Downloading " + NFKC_TXT + " ... ");
-    download(new URL(norm2url, NFKC_TXT), NFKC_TXT);
+    download(norm2url.resolve(NFKC_TXT), NFKC_TXT);
     System.err.println("done.");
     System.err.print("Downloading " + NFKC_CF_TXT + " ... ");
-    download(new URL(norm2url, NFKC_CF_TXT), NFKC_CF_TXT);
+    download(norm2url.resolve(NFKC_CF_TXT), NFKC_CF_TXT);
     System.err.println("done.");
 
     System.err.print("Downloading " + NFKC_CF_TXT + " and making diacritic rules one-way ... ");
-    URLConnection connection = openConnection(new URL(norm2url, NFC_TXT));
+    URLConnection connection = openConnection(norm2url.resolve(NFC_TXT).toURL());
     try (BufferedReader reader =
         new BufferedReader(
             new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8));
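Relative resolution changes in the same way: new URL(URL base, String spec) becomes URI.resolve(spec). A small sketch with placeholder names (the real ICU constants are defined elsewhere in the file); keeping the trailing "/" on each base is what makes the relative names append instead of replacing the last path segment:

import java.net.URI;

public class UriResolveSketch {
  public static void main(String[] args) {
    URI tags = URI.create("https://example.invalid/icu/tags/"); // placeholder base
    URI norm2 = tags.resolve("release-x-y/").resolve("data/norm2/");
    // Prints https://example.invalid/icu/tags/release-x-y/data/norm2/nfkc.txt
    System.out.println(norm2.resolve("nfkc.txt"));
  }
}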
@@ -210,8 +211,8 @@ public class GenerateUTR30DataFiles {
     System.err.println("done.");
   }
 
-  private static void download(URL url, String outputFile) throws IOException {
-    final URLConnection connection = openConnection(url);
+  private static void download(URI uri, String outputFile) throws IOException {
+    final URLConnection connection = openConnection(uri.toURL());
     try (InputStream inputStream = connection.getInputStream();
         OutputStream outputStream = Files.newOutputStream(Path.of(outputFile))) {
       inputStream.transferTo(outputStream);
@@ -46,6 +46,7 @@ import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.SuppressForbidden;
 
 /**
  * Data maintained by a performance test run.
@@ -204,7 +205,7 @@ public class PerfRunData implements Closeable {
     resetInputs();
 
     // release unused stuff
-    System.runFinalization();
+    runFinalization();
     System.gc();
 
     // Re-init clock
@@ -482,4 +483,10 @@ public class PerfRunData implements Closeable {
   public Map<String, AnalyzerFactory> getAnalyzerFactories() {
     return analyzerFactories;
   }
+
+  @SuppressWarnings("removal")
+  @SuppressForbidden(reason = "requires to run finalization")
+  private static void runFinalization() {
+    System.runFinalization();
+  }
 }
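Both PerfRunData and (further down) TestWeakIdentityMap use the same workaround for System.runFinalization(), which is deprecated for removal now that finalization itself is deprecated: the call is funneled through one private helper so the removal warning is suppressed in a single place, and Lucene's own @SuppressForbidden annotation keeps the forbidden-apis check quiet. A standalone sketch of the suppression pattern, without the Lucene-specific annotation:

public class RunFinalizationSketch {

  @SuppressWarnings("removal") // System.runFinalization() is deprecated for removal
  private static void runFinalization() {
    System.runFinalization();
  }

  public static void main(String[] args) {
    runFinalization(); // callers no longer trip deprecation-for-removal warnings
    System.gc();
  }
}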
@@ -39,7 +39,7 @@ public class NewLocaleTask extends PerfTask {
   private String variant;
 
   /**
-   * Create a new {@link java.util.Locale} and set it it in the getRunData() for use by all future
+   * Create a new {@link java.util.Locale} and set it in the getRunData() for use by all future
    * tasks.
    */
   public NewLocaleTask(PerfRunData runData) {
@@ -52,7 +52,7 @@ public class NewLocaleTask extends PerfTask {
     String lang = language;
     if (lang.equalsIgnoreCase("ROOT")) lang = ""; // empty language is the root locale in the JDK
 
-    return new Locale(lang, country, variant);
+    return Locale.of(lang, country, variant);
   }
 
   @Override
@@ -793,19 +793,19 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
 
     // ROOT locale
     benchmark = execBenchmark(getLocaleConfig("ROOT"));
-    assertEquals(new Locale(""), benchmark.getRunData().getLocale());
+    assertEquals(Locale.of(""), benchmark.getRunData().getLocale());
 
     // specify just a language
     benchmark = execBenchmark(getLocaleConfig("de"));
-    assertEquals(new Locale("de"), benchmark.getRunData().getLocale());
+    assertEquals(Locale.of("de"), benchmark.getRunData().getLocale());
 
     // specify language + country
     benchmark = execBenchmark(getLocaleConfig("en,US"));
-    assertEquals(new Locale("en", "US"), benchmark.getRunData().getLocale());
+    assertEquals(Locale.of("en", "US"), benchmark.getRunData().getLocale());
 
     // specify language + country + variant
     benchmark = execBenchmark(getLocaleConfig("no,NO,NY"));
-    assertEquals(new Locale("no", "NO", "NY"), benchmark.getRunData().getLocale());
+    assertEquals(Locale.of("no", "NO", "NY"), benchmark.getRunData().getLocale());
   }
 
   private String[] getLocaleConfig(String localeParam) {
@@ -832,22 +832,22 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
   public void testCollator() throws Exception {
     // ROOT locale
     Benchmark benchmark = execBenchmark(getCollatorConfig("ROOT", "impl:jdk"));
-    CollationKeyAnalyzer expected = new CollationKeyAnalyzer(Collator.getInstance(new Locale("")));
+    CollationKeyAnalyzer expected = new CollationKeyAnalyzer(Collator.getInstance(Locale.of("")));
     assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar");
 
     // specify just a language
     benchmark = execBenchmark(getCollatorConfig("de", "impl:jdk"));
-    expected = new CollationKeyAnalyzer(Collator.getInstance(new Locale("de")));
+    expected = new CollationKeyAnalyzer(Collator.getInstance(Locale.of("de")));
     assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar");
 
     // specify language + country
     benchmark = execBenchmark(getCollatorConfig("en,US", "impl:jdk"));
-    expected = new CollationKeyAnalyzer(Collator.getInstance(new Locale("en", "US")));
+    expected = new CollationKeyAnalyzer(Collator.getInstance(Locale.of("en", "US")));
     assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar");
 
     // specify language + country + variant
     benchmark = execBenchmark(getCollatorConfig("no,NO,NY", "impl:jdk"));
-    expected = new CollationKeyAnalyzer(Collator.getInstance(new Locale("no", "NO", "NY")));
+    expected = new CollationKeyAnalyzer(Collator.getInstance(Locale.of("no", "NO", "NY")));
     assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar");
   }
 
@@ -96,7 +96,7 @@ public class TestHtmlParser extends LuceneTestCase {
   public void testTurkish() throws Exception {
     final Locale saved = Locale.getDefault();
     try {
-      Locale.setDefault(new Locale("tr", "TR"));
+      Locale.setDefault(Locale.of("tr", "TR"));
       String text =
           "<html><HEAD><TITLE>ııı</TITLE></head><body>"
              + "<IMG SRC=\"../images/head.jpg\" WIDTH=570 HEIGHT=47 BORDER=0 ALT=\"ş\">"
@@ -119,7 +119,7 @@ public class TestWeakIdentityMap extends LuceneTestCase {
     int size = map.size();
     for (int i = 0; size > 0 && i < 10; i++)
       try {
-        System.runFinalization();
+        runFinalization();
         System.gc();
         int newSize = map.size();
         assertTrue("previousSize(" + size + ")>=newSize(" + newSize + ")", size >= newSize);
@@ -232,7 +232,7 @@ public class TestWeakIdentityMap extends LuceneTestCase {
     int size = map.size();
     for (int i = 0; size > 0 && i < 10; i++)
       try {
-        System.runFinalization();
+        runFinalization();
         System.gc();
         int newSize = map.size();
         assertTrue("previousSize(" + size + ")>=newSize(" + newSize + ")", size >= newSize);
@@ -252,4 +252,10 @@ public class TestWeakIdentityMap extends LuceneTestCase {
           InterruptedException ie) {
       }
   }
+
+  @SuppressWarnings("removal")
+  @SuppressForbidden(reason = "requires to run finalization")
+  private static void runFinalization() {
+    System.runFinalization();
+  }
 }
@@ -421,7 +421,7 @@ public final class QueryParserPaneProvider implements QueryParserTabOperator {
             .fuzzyMinSim(fuzzyMinSimFloat)
             .fuzzyPrefixLength(fuzzyPrefLenInt)
             .dateResolution(DateTools.Resolution.valueOf((String) dateResCB.getSelectedItem()))
-            .locale(new Locale(locationTF.getText()))
+            .locale(Locale.of(locationTF.getText()))
            .timeZone(TimeZone.getTimeZone(timezoneTF.getText()))
            .typeMap(typeMap)
            .build();
@@ -23,6 +23,7 @@ import java.awt.event.MouseAdapter;
 import java.awt.event.MouseEvent;
 import java.io.IOException;
 import java.net.MalformedURLException;
+import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import javax.swing.JLabel;
@@ -37,7 +38,7 @@ public final class URLLabel extends JLabel {
     super(text);
 
     try {
-      this.link = new URL(text);
+      this.link = (URI.create(text)).toURL();
     } catch (MalformedURLException e) {
       throw new LukeException(e.getMessage(), e);
     }
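One behavioral difference is worth flagging here, as a caution rather than a claim about the commit: URI.create(text) reports syntactically invalid input with an unchecked IllegalArgumentException, which the surrounding catch (MalformedURLException e) does not handle. A purely hypothetical defensive variant that keeps both failure modes on the checked path:

import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;

final class SafeUrlParser {
  private SafeUrlParser() {}

  // Hypothetical helper, not part of the commit.
  static URL parse(String text) throws MalformedURLException {
    try {
      return new URI(text).toURL(); // new URI(...) throws the checked URISyntaxException
    } catch (URISyntaxException e) {
      MalformedURLException wrapped = new MalformedURLException(e.getMessage());
      wrapped.initCause(e);
      throw wrapped;
    }
  }
}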
@@ -68,7 +68,7 @@ public class TestNLS extends LuceneTestCase {
   }
 
   public void testNLSLoading_xx_XX() {
-    Locale locale = new Locale("xx", "XX", "");
+    Locale locale = Locale.of("xx", "XX", "");
     String message =
         NLS.getLocalizedMessage(
             MessagesTestBundle.Q0004E_INVALID_SYNTAX_ESCAPE_UNICODE_TRUNCATION, locale);