Remove usage and add some legacy java.util classes to forbiddenapis (Stack, Hashtable, Vector) (#12404)

This commit is contained in:
Uwe Schindler 2023-06-29 16:56:41 +02:00 committed by GitHub
parent f44cc45cf8
commit e503805758
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 43 additions and 35 deletions

View File

@ -37,6 +37,11 @@ java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone
java.io.File#delete() @ use Files.delete for real exception, IOUtils.deleteFilesIgnoringExceptions if you don't care
java.util.Collections#shuffle(java.util.List) @ Use shuffle(List, Random) instead so that it can be reproduced
java.util.Stack @ Use more modern java.util.ArrayDeque as it is not synchronized
java.util.Vector @ Use more modern java.util.ArrayList as it is not synchronized
# TODO (needs some fix in forbiddenapis): this also hits java.util.Properties:
# java.util.Hashtable @ Use more modern java.util.HashMap as it is not synchronized
java.util.Locale#forLanguageTag(java.lang.String) @ use new Locale.Builder().setLanguageTag(...).build() which has error handling
java.util.Locale#toString() @ use Locale#toLanguageTag() for a standardized BCP47 locale name

View File

@ -149,7 +149,9 @@ Bug Fixes
Other
---------------------
(No changes)
* GITHUB#12404: Remove usage and add some legacy java.util classes to forbiddenapis (Stack, Hashtable, Vector).
(Uwe Schindler)
======================== Lucene 9.7.0 =======================

View File

@ -17,8 +17,9 @@
package org.apache.lucene.analysis.compound.hyphenation;
import java.io.PrintStream;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Enumeration;
import java.util.Stack;
/**
*
@ -457,20 +458,20 @@ public class TernaryTree implements Cloneable {
}
/** Node stack */
Stack<Item> ns;
Deque<Item> ns;
/** key stack implemented with a StringBuilder */
StringBuilder ks;
public Iterator() {
cur = -1;
ns = new Stack<>();
ns = new ArrayDeque<>();
ks = new StringBuilder();
rewind();
}
public void rewind() {
ns.removeAllElements();
ns.clear();
ks.setLength(0);
cur = root;
run();
@ -501,7 +502,7 @@ public class TernaryTree implements Cloneable {
Item i = new Item();
int res = 0;
if (ns.empty()) {
if (ns.isEmpty()) {
return -1;
}
@ -538,7 +539,7 @@ public class TernaryTree implements Cloneable {
break;
default:
if (ns.empty()) {
if (ns.isEmpty()) {
return -1;
}
climb = true;

View File

@ -26,12 +26,12 @@ import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Stack;
import org.apache.lucene.benchmark.byTask.utils.Config;
/**
@ -81,7 +81,7 @@ public class DirContentSource extends ContentSource {
int count = 0;
Stack<Path> stack = new Stack<>();
ArrayDeque<Path> stack = new ArrayDeque<>();
/* this seems silly ... there must be a better way ...
not that this is good, but can it matter? */
@ -93,7 +93,7 @@ public class DirContentSource extends ContentSource {
}
void find() throws IOException {
if (stack.empty()) {
if (stack.isEmpty()) {
return;
}
if (!Files.isDirectory(stack.peek())) {

View File

@ -17,6 +17,7 @@
package org.apache.lucene.facet.sortedset;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -25,7 +26,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Stack;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType;
@ -112,7 +112,7 @@ public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesRead
List<Integer> siblings = new ArrayList<>();
// stack of paths with unfulfilled siblings
Stack<OrdAndComponent> siblingStack = new Stack<>();
ArrayDeque<OrdAndComponent> siblingStack = new ArrayDeque<>();
int dimEndOrd = dimStartOrd;
@ -125,7 +125,7 @@ public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesRead
int ord = dimEndOrd - dimStartOrd;
while (siblingStack.empty() == false
while (siblingStack.isEmpty() == false
&& siblingStack.peek().component.length >= components.length) {
OrdAndComponent possibleSibling = siblingStack.pop();
if (possibleSibling.component.length == components.length) {

View File

@ -17,6 +17,7 @@
package org.apache.lucene.facet.sortedset;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -25,7 +26,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
@ -1683,13 +1683,13 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Dfs through top children
for (FacetResult dimResult : actualAllDims) {
if (config.getDimConfig(dimResult.dim).hierarchical) {
Stack<String[]> stack = new Stack<>();
ArrayDeque<String[]> stack = new ArrayDeque<>();
for (LabelAndValue labelAndValue : dimResult.labelValues) {
String[] path = new String[1];
path[0] = labelAndValue.label;
stack.add(path);
}
while (stack.empty() == false) {
while (stack.isEmpty() == false) {
String[] currPath = stack.pop();
FacetResult expectedResult =
getFacetResultForPath(expected, dimResult.dim, currPath);

View File

@ -17,9 +17,10 @@
package org.apache.lucene.misc.search;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;
import java.util.Stack;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey;
@ -70,7 +71,7 @@ public abstract class DiversifiedTopDocsCollector extends TopDocsCollector<Score
private int numHits;
private Map<Long, ScoreDocKeyQueue> perKeyQueues;
protected int maxNumPerKey;
private Stack<ScoreDocKeyQueue> sparePerKeyQueues = new Stack<>();
private Deque<ScoreDocKeyQueue> sparePerKeyQueues = new ArrayDeque<>();
public DiversifiedTopDocsCollector(int numHits, int maxHitsPerKey) {
super(new ScoreDocKeyQueue(numHits));

View File

@ -17,7 +17,7 @@
package org.apache.lucene.queryparser.flexible.core.nodes;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
@ -36,7 +36,7 @@ public abstract class QueryNodeImpl implements QueryNode, Cloneable {
private boolean isLeaf = true;
private Hashtable<String, Object> tags = new Hashtable<>();
private HashMap<String, Object> tags = new HashMap<>();
private List<QueryNode> clauses = null;
@ -115,7 +115,7 @@ public abstract class QueryNodeImpl implements QueryNode, Cloneable {
clone.isLeaf = this.isLeaf;
// Reset all tags
clone.tags = new Hashtable<>();
clone.tags = new HashMap<>();
// copy children
if (this.clauses != null) {

View File

@ -16,8 +16,8 @@
*/
package org.apache.lucene.search.suggest.tst;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Stack;
/**
* Ternary Search Trie implementation.
@ -126,9 +126,9 @@ public class TSTAutocomplete {
}
p = p.eqKid;
Stack<TernaryTreeNode> st = new Stack<>();
ArrayDeque<TernaryTreeNode> st = new ArrayDeque<>();
st.push(p);
while (!st.empty()) {
while (!st.isEmpty()) {
TernaryTreeNode top = st.peek();
st.pop();
if (top.token != null) {

View File

@ -16,8 +16,8 @@
*/
package org.apache.lucene.tests.util;
import java.util.Arrays;
import java.util.Stack;
import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
@ -35,7 +35,7 @@ import org.junit.runners.model.Statement;
* This should be the case from JUnit 4.10 on.
*/
public class TestJUnitRuleOrder extends WithNestedTests {
static Stack<String> stack;
static ArrayList<String> stack;
public TestJUnitRuleOrder() {
super(true);
@ -44,12 +44,12 @@ public class TestJUnitRuleOrder extends WithNestedTests {
public static class Nested extends WithNestedTests.AbstractNestedTest {
@Before
public void before() {
stack.push("@Before");
stack.add("@Before");
}
@After
public void after() {
stack.push("@After");
stack.add("@After");
}
@Rule
@ -60,9 +60,9 @@ public class TestJUnitRuleOrder extends WithNestedTests {
return new Statement() {
@Override
public void evaluate() throws Throwable {
stack.push("@Rule before");
stack.add("@Rule before");
base.evaluate();
stack.push("@Rule after");
stack.add("@Rule after");
}
};
}
@ -75,12 +75,12 @@ public class TestJUnitRuleOrder extends WithNestedTests {
@BeforeClass
public static void beforeClassCleanup() {
stack = new Stack<>();
stack = new ArrayList<>();
}
@AfterClass
public static void afterClassCheck() {
stack.push("@AfterClass");
stack.add("@AfterClass");
}
}
@ -88,7 +88,6 @@ public class TestJUnitRuleOrder extends WithNestedTests {
public void testRuleOrder() {
JUnitCore.runClasses(Nested.class);
Assert.assertEquals(
Arrays.toString(stack.toArray()),
"[@Rule before, @Before, @After, @Rule after, @AfterClass]");
List.of("@Rule before", "@Before", "@After", "@Rule after", "@AfterClass"), stack);
}
}