mirror of https://github.com/apache/maven.git
[MNG-8014] Fix multithreaded builder (#1386)
This commit is contained in:
parent
6afa4770cb
commit
b6a84fcac6
|
@ -25,9 +25,8 @@
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
import java.util.concurrent.ForkJoinTask;
|
||||
import java.util.concurrent.*;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@ -88,7 +87,7 @@
|
|||
@Singleton
|
||||
public class DefaultProjectBuilder implements ProjectBuilder {
|
||||
public static final String BUILDER_PARALLELISM = "maven.projectBuilder.parallelism";
|
||||
public static final int DEFAULT_BUILDER_PARALLELISM = 1;
|
||||
public static final int DEFAULT_BUILDER_PARALLELISM = Runtime.getRuntime().availableProcessors() / 2 + 1;
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
private final ModelBuilder modelBuilder;
|
||||
|
@ -208,14 +207,14 @@ class BuildSession implements AutoCloseable {
|
|||
private final List<RemoteRepository> repositories;
|
||||
private final ReactorModelPool modelPool;
|
||||
private final TransformerContextBuilder transformerContextBuilder;
|
||||
private final ForkJoinPool forkJoinPool;
|
||||
private final ExecutorService executor;
|
||||
|
||||
BuildSession(ProjectBuildingRequest request, boolean localProjects) {
|
||||
this.request = request;
|
||||
this.session =
|
||||
RepositoryUtils.overlay(request.getLocalRepository(), request.getRepositorySession(), repoSystem);
|
||||
this.repositories = RepositoryUtils.toRepos(request.getRemoteRepositories());
|
||||
this.forkJoinPool = new ForkJoinPool(getParallelism(request));
|
||||
this.executor = createExecutor(getParallelism(request));
|
||||
if (localProjects) {
|
||||
this.modelPool = new ReactorModelPool();
|
||||
this.transformerContextBuilder = modelBuilder.newTransformerContextBuilder();
|
||||
|
@ -225,9 +224,42 @@ class BuildSession implements AutoCloseable {
|
|||
}
|
||||
}
|
||||
|
||||
ExecutorService createExecutor(int parallelism) {
|
||||
//
|
||||
// We need an executor that will not block.
|
||||
// We can't use work stealing, as we are building a graph
|
||||
// and this could lead to cycles where a thread waits for
|
||||
// a task to finish, then execute another one which waits
|
||||
// for the initial task...
|
||||
// In order to work around that problem, we override the
|
||||
// invokeAll method, so that whenever the method is called,
|
||||
// the pool core size will be incremented before submitting
|
||||
// all the tasks, then the thread will block waiting for all
|
||||
// those subtasks to finish.
|
||||
// This ensures the number of running workers is no more than
|
||||
// the defined parallism, while making sure the pool will not
|
||||
// be exhausted
|
||||
//
|
||||
return new ThreadPoolExecutor(
|
||||
parallelism, Integer.MAX_VALUE, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>()) {
|
||||
final AtomicInteger parked = new AtomicInteger();
|
||||
|
||||
@Override
|
||||
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
|
||||
throws InterruptedException {
|
||||
setCorePoolSize(parallelism + parked.incrementAndGet());
|
||||
try {
|
||||
return super.invokeAll(tasks);
|
||||
} finally {
|
||||
setCorePoolSize(parallelism + parked.decrementAndGet());
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
this.forkJoinPool.shutdownNow();
|
||||
this.executor.shutdownNow();
|
||||
}
|
||||
|
||||
private int getParallelism(ProjectBuildingRequest request) {
|
||||
|
@ -355,18 +387,7 @@ ProjectBuildingResult build(Artifact artifact, boolean allowStubModel) throws Pr
|
|||
}
|
||||
|
||||
List<ProjectBuildingResult> build(List<File> pomFiles, boolean recursive) throws ProjectBuildingException {
|
||||
ForkJoinTask<List<ProjectBuildingResult>> task = forkJoinPool.submit(() -> doBuild(pomFiles, recursive));
|
||||
|
||||
// ForkJoinTask.getException rewraps the exception in a weird way
|
||||
// which cause an additional layer of exception, so try to unwrap it
|
||||
task.quietlyJoin();
|
||||
if (task.isCompletedAbnormally()) {
|
||||
Throwable e = task.getException();
|
||||
Throwable c = e.getCause();
|
||||
uncheckedThrow(c != null && c.getClass() == e.getClass() ? c : e);
|
||||
}
|
||||
|
||||
List<ProjectBuildingResult> results = task.getRawResult();
|
||||
List<ProjectBuildingResult> results = doBuild(pomFiles, recursive);
|
||||
if (results.stream()
|
||||
.flatMap(r -> r.getProblems().stream())
|
||||
.anyMatch(p -> p.getSeverity() != ModelProblem.Severity.WARNING)) {
|
||||
|
@ -417,14 +438,21 @@ private List<InterimResult> build(
|
|||
Set<File> aggregatorFiles,
|
||||
boolean root,
|
||||
boolean recursive) {
|
||||
List<ForkJoinTask<InterimResult>> tasks = pomFiles.stream()
|
||||
.map(pomFile -> ForkJoinTask.adapt(
|
||||
List<Callable<InterimResult>> tasks = pomFiles.stream()
|
||||
.map(pomFile -> ((Callable<InterimResult>)
|
||||
() -> build(projectIndex, pomFile, concat(aggregatorFiles, pomFile), root, recursive)))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return ForkJoinTask.invokeAll(tasks).stream()
|
||||
.map(ForkJoinTask::getRawResult)
|
||||
.collect(Collectors.toList());
|
||||
try {
|
||||
List<Future<InterimResult>> futures = executor.invokeAll(tasks);
|
||||
List<InterimResult> list = new ArrayList<>();
|
||||
for (Future<InterimResult> future : futures) {
|
||||
InterimResult interimResult = future.get();
|
||||
list.add(interimResult);
|
||||
}
|
||||
return list;
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private <T> Set<T> concat(Set<T> set, T elem) {
|
||||
|
@ -571,10 +599,31 @@ private List<ProjectBuildingResult> build(
|
|||
}
|
||||
}
|
||||
|
||||
return interimResults.parallelStream()
|
||||
.map(interimResult -> doBuild(projectIndex, interimResult))
|
||||
.flatMap(List::stream)
|
||||
List<Callable<List<ProjectBuildingResult>>> callables = interimResults.stream()
|
||||
.map(interimResult ->
|
||||
(Callable<List<ProjectBuildingResult>>) () -> doBuild(projectIndex, interimResult))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
try {
|
||||
List<Future<List<ProjectBuildingResult>>> futures = executor.invokeAll(callables);
|
||||
return futures.stream()
|
||||
.map(listFuture -> {
|
||||
try {
|
||||
return listFuture.get();
|
||||
} catch (InterruptedException e) {
|
||||
uncheckedThrow(e);
|
||||
return null;
|
||||
} catch (ExecutionException e) {
|
||||
uncheckedThrow(e.getCause());
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.flatMap(List::stream)
|
||||
.collect(Collectors.toList());
|
||||
} catch (InterruptedException e) {
|
||||
uncheckedThrow(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private List<ProjectBuildingResult> doBuild(Map<File, MavenProject> projectIndex, InterimResult interimResult) {
|
||||
|
|
|
@ -27,8 +27,6 @@
|
|||
import java.lang.reflect.Field;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ForkJoinTask;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@ -1870,39 +1868,32 @@ private static <T> T cache(
|
|||
String version,
|
||||
ModelCacheTag<T> tag,
|
||||
Callable<T> supplier) {
|
||||
return doWithCache(cache, supplier, s -> cache.computeIfAbsent(groupId, artifactId, version, tag.getName(), s));
|
||||
Supplier<T> s = asSupplier(supplier);
|
||||
if (cache == null) {
|
||||
return s.get();
|
||||
} else {
|
||||
return cache.computeIfAbsent(groupId, artifactId, version, tag.getName(), s);
|
||||
}
|
||||
}
|
||||
|
||||
private static <T> T cache(ModelCache cache, Source source, ModelCacheTag<T> tag, Callable<T> supplier) {
|
||||
return doWithCache(cache, supplier, s -> cache.computeIfAbsent(source, tag.getName(), s));
|
||||
Supplier<T> s = asSupplier(supplier);
|
||||
if (cache == null) {
|
||||
return s.get();
|
||||
} else {
|
||||
return cache.computeIfAbsent(source, tag.getName(), s);
|
||||
}
|
||||
}
|
||||
|
||||
private static <T> T doWithCache(
|
||||
ModelCache cache, Callable<T> supplier, Function<Supplier<Supplier<T>>, T> asyncSupplierConsumer) {
|
||||
if (cache != null) {
|
||||
return asyncSupplierConsumer.apply(() -> {
|
||||
ForkJoinTask<T> task = ForkJoinTask.adapt(supplier);
|
||||
task.fork();
|
||||
return () -> {
|
||||
task.quietlyJoin();
|
||||
if (task.isCompletedAbnormally()) {
|
||||
Throwable e = task.getException();
|
||||
while (e instanceof RuntimeException && e.getCause() != null) {
|
||||
e = e.getCause();
|
||||
}
|
||||
uncheckedThrow(e);
|
||||
}
|
||||
return task.getRawResult();
|
||||
};
|
||||
});
|
||||
} else {
|
||||
private static <T> Supplier<T> asSupplier(Callable<T> supplier) {
|
||||
return () -> {
|
||||
try {
|
||||
return supplier.call();
|
||||
} catch (Exception e) {
|
||||
uncheckedThrow(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
static <T extends Throwable> void uncheckedThrow(Throwable t) throws T {
|
||||
|
|
|
@ -252,8 +252,8 @@ public Properties getSystemProperties() {
|
|||
public DefaultModelBuildingRequest setSystemProperties(Properties systemProperties) {
|
||||
if (systemProperties != null) {
|
||||
this.systemProperties = new Properties();
|
||||
synchronized (systemProperties) { // avoid concurrent modification if someone else sets/removes an unrelated
|
||||
// system property
|
||||
// avoid concurrent modification if someone else sets/removes an unrelated system property
|
||||
synchronized (systemProperties) {
|
||||
this.systemProperties.putAll(systemProperties);
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -46,6 +46,8 @@ class DefaultTransformerContextBuilder implements TransformerContextBuilder {
|
|||
|
||||
private final Map<String, Set<FileModelSource>> mappedSources = new ConcurrentHashMap<>(64);
|
||||
|
||||
private volatile boolean fullReactorLoaded;
|
||||
|
||||
DefaultTransformerContextBuilder(DefaultModelBuilder defaultModelBuilder) {
|
||||
this.defaultModelBuilder = defaultModelBuilder;
|
||||
this.context = new DefaultTransformerContext(defaultModelBuilder.getModelProcessor());
|
||||
|
@ -60,8 +62,6 @@ public TransformerContext initialize(ModelBuildingRequest request, ModelProblemC
|
|||
DefaultModelProblemCollector problems = (DefaultModelProblemCollector) collector;
|
||||
return new TransformerContext() {
|
||||
|
||||
private volatile boolean fullReactorLoaded;
|
||||
|
||||
@Override
|
||||
public Path locate(Path path) {
|
||||
return context.locate(path);
|
||||
|
@ -119,7 +119,7 @@ private Model findRawModel(Path from, String groupId, String artifactId) {
|
|||
|
||||
private void loadFullReactor() {
|
||||
if (!fullReactorLoaded) {
|
||||
synchronized (this) {
|
||||
synchronized (DefaultTransformerContextBuilder.this) {
|
||||
if (!fullReactorLoaded) {
|
||||
doLoadFullReactor();
|
||||
fullReactorLoaded = true;
|
||||
|
|
|
@ -18,26 +18,20 @@
|
|||
*/
|
||||
package org.apache.maven.model.building;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.*;
|
||||
|
||||
class Graph {
|
||||
|
||||
final Map<String, List<String>> graph = new LinkedHashMap<>();
|
||||
final Map<String, Set<String>> graph = new LinkedHashMap<>();
|
||||
|
||||
synchronized void addEdge(String from, String to) throws CycleDetectedException {
|
||||
graph.computeIfAbsent(from, l -> new ArrayList<>()).add(to);
|
||||
List<String> cycle = visitCycle(graph, Collections.singleton(to), new HashMap<>(), new LinkedList<>());
|
||||
if (cycle != null) {
|
||||
// remove edge which introduced cycle
|
||||
throw new CycleDetectedException(
|
||||
"Edge between '" + from + "' and '" + to + "' introduces to cycle in the graph", cycle);
|
||||
if (graph.computeIfAbsent(from, l -> new HashSet<>()).add(to)) {
|
||||
List<String> cycle = visitCycle(graph, Collections.singleton(to), new HashMap<>(), new LinkedList<>());
|
||||
if (cycle != null) {
|
||||
// remove edge which introduced cycle
|
||||
throw new CycleDetectedException(
|
||||
"Edge between '" + from + "' and '" + to + "' introduces to cycle in the graph", cycle);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -47,7 +41,7 @@ private enum DfsState {
|
|||
}
|
||||
|
||||
private static List<String> visitCycle(
|
||||
Map<String, List<String>> graph,
|
||||
Map<String, Set<String>> graph,
|
||||
Collection<String> children,
|
||||
Map<String, DfsState> stateMap,
|
||||
LinkedList<String> cycle) {
|
||||
|
|
|
@ -32,7 +32,7 @@
|
|||
*/
|
||||
public interface ModelCache {
|
||||
|
||||
<T> T computeIfAbsent(String groupId, String artifactId, String version, String tag, Supplier<Supplier<T>> data);
|
||||
<T> T computeIfAbsent(String groupId, String artifactId, String version, String tag, Supplier<T> data);
|
||||
|
||||
<T> T computeIfAbsent(Source path, String tag, Supplier<Supplier<T>> data);
|
||||
<T> T computeIfAbsent(Source path, String tag, Supplier<T> data);
|
||||
}
|
||||
|
|
|
@ -57,23 +57,19 @@ private DefaultModelCache(ConcurrentMap<Object, Supplier<?>> cache) {
|
|||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public <T> T computeIfAbsent(
|
||||
String groupId, String artifactId, String version, String tag, Supplier<Supplier<T>> data) {
|
||||
Object obj = computeIfAbsent(new GavCacheKey(groupId, artifactId, version, tag), (Supplier) data);
|
||||
return (T) obj;
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public <T> T computeIfAbsent(String groupId, String artifactId, String version, String tag, Supplier<T> data) {
|
||||
return (T) computeIfAbsent(new GavCacheKey(groupId, artifactId, version, tag), data);
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public <T> T computeIfAbsent(Source path, String tag, Supplier<Supplier<T>> data) {
|
||||
Object obj = computeIfAbsent(new SourceCacheKey(path, tag), (Supplier) data);
|
||||
return (T) obj;
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public <T> T computeIfAbsent(Source path, String tag, Supplier<T> data) {
|
||||
return (T) computeIfAbsent(new SourceCacheKey(path, tag), data);
|
||||
}
|
||||
|
||||
protected Object computeIfAbsent(Object key, Supplier<Supplier<?>> data) {
|
||||
Supplier<?> s = cache.computeIfAbsent(key, k -> data.get());
|
||||
return s != null ? s.get() : null;
|
||||
protected Object computeIfAbsent(Object key, Supplier<?> data) {
|
||||
return cache.computeIfAbsent(key, k -> new CachingSupplier<>(data)).get();
|
||||
}
|
||||
|
||||
static class GavCacheKey {
|
||||
|
@ -168,4 +164,47 @@ public int hashCode() {
|
|||
return hash;
|
||||
}
|
||||
}
|
||||
|
||||
static class CachingSupplier<T> implements Supplier<T> {
|
||||
final Supplier<T> supplier;
|
||||
volatile Object value;
|
||||
|
||||
CachingSupplier(Supplier<T> supplier) {
|
||||
this.supplier = supplier;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings({"unchecked", "checkstyle:InnerAssignment"})
|
||||
public T get() {
|
||||
Object v;
|
||||
if ((v = value) == null) {
|
||||
synchronized (this) {
|
||||
if ((v = value) == null) {
|
||||
try {
|
||||
v = value = supplier.get();
|
||||
} catch (Exception e) {
|
||||
v = value = new AltRes(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (v instanceof AltRes) {
|
||||
uncheckedThrow(((AltRes) v).t);
|
||||
}
|
||||
return (T) v;
|
||||
}
|
||||
|
||||
static class AltRes {
|
||||
final Throwable t;
|
||||
|
||||
AltRes(Throwable t) {
|
||||
this.t = t;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static <T extends Throwable> void uncheckedThrow(Throwable t) throws T {
|
||||
throw (T) t; // rely on vacuous cast
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,601 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.apache.maven.internal.xml;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.*;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.function.UnaryOperator;
|
||||
|
||||
/**
 * Static factories producing compact, unmodifiable copies of lists, maps
 * and properties. Every returned object rejects all mutating operations
 * with {@link UnsupportedOperationException} and is {@link Serializable}.
 */
class ImmutableCollections {

    /** Shared empty list, handed out by {@link #emptyList()}. */
    private static final List<?> EMPTY_LIST = new AbstractImmutableList<Object>() {
        @Override
        public Object get(int index) {
            throw new IndexOutOfBoundsException();
        }

        @Override
        public int size() {
            return 0;
        }
    };

    /** Shared empty map, handed out by {@link #emptyMap()}. */
    private static final Map<?, ?> EMPTY_MAP = new AbstractImmutableMap<Object, Object>() {
        @Override
        public Set<Entry<Object, Object>> entrySet() {
            return new AbstractImmutableSet<Entry<Object, Object>>() {
                @Override
                public Iterator<Entry<Object, Object>> iterator() {
                    return new Iterator<Entry<Object, Object>>() {
                        @Override
                        public boolean hasNext() {
                            return false;
                        }

                        @Override
                        public Entry<Object, Object> next() {
                            throw new NoSuchElementException();
                        }
                    };
                }

                @Override
                public int size() {
                    return 0;
                }
            };
        }
    };

    /**
     * Returns an immutable snapshot of the given collection, choosing a
     * representation sized to the element count. A {@code null} input
     * yields the shared empty list; an already-immutable list is
     * returned as-is.
     */
    @SuppressWarnings("unchecked")
    static <E> List<E> copy(Collection<E> collection) {
        if (collection == null) {
            return emptyList();
        } else if (collection instanceof AbstractImmutableList) {
            // already one of ours: immutable, so safe to share
            return (List<E>) collection;
        } else {
            switch (collection.size()) {
                case 0:
                    return emptyList();
                case 1:
                    return singletonList(collection.iterator().next());
                case 2:
                    Iterator<E> it = collection.iterator();
                    return new List2<>(it.next(), it.next());
                default:
                    return new ListN<>(collection);
            }
        }
    }

    /** Returns the shared immutable empty list. */
    @SuppressWarnings("unchecked")
    static <E> List<E> emptyList() {
        return (List<E>) EMPTY_LIST;
    }

    /** Returns an immutable single-element list. */
    static <E> List<E> singletonList(E element) {
        return new List1<>(element);
    }

    /**
     * Returns an immutable snapshot of the given map. A {@code null}
     * input yields the shared empty map; an already-immutable map is
     * returned as-is.
     */
    static <K, V> Map<K, V> copy(Map<K, V> map) {
        if (map == null) {
            return emptyMap();
        } else if (map instanceof AbstractImmutableMap) {
            return map;
        } else {
            switch (map.size()) {
                case 0:
                    return emptyMap();
                case 1:
                    Map.Entry<K, V> entry = map.entrySet().iterator().next();
                    return singletonMap(entry.getKey(), entry.getValue());
                default:
                    return new MapN<>(map);
            }
        }
    }

    /** Returns the shared immutable empty map. */
    @SuppressWarnings("unchecked")
    static <K, V> Map<K, V> emptyMap() {
        return (Map<K, V>) EMPTY_MAP;
    }

    /** Returns an immutable single-entry map. */
    static <K, V> Map<K, V> singletonMap(K key, V value) {
        return new Map1<>(key, value);
    }

    /**
     * Returns a read-only copy of the given properties; an already
     * read-only instance is returned as-is.
     */
    static Properties copy(Properties properties) {
        if (properties instanceof ROProperties) {
            return properties;
        }
        return new ROProperties(properties);
    }

    /** Immutable one-element list. */
    private static class List1<E> extends AbstractImmutableList<E> {
        private final E element;

        private List1(E element) {
            this.element = element;
        }

        @Override
        public E get(int index) {
            if (index == 0) {
                return element;
            }
            throw outOfBounds(index);
        }

        @Override
        public int size() {
            return 1;
        }
    }

    /** Immutable two-element list. */
    private static class List2<E> extends AbstractImmutableList<E> {
        private final E element1;
        private final E element2;

        private List2(E element1, E element2) {
            this.element1 = element1;
            this.element2 = element2;
        }

        @Override
        public E get(int index) {
            if (index == 0) {
                return element1;
            } else if (index == 1) {
                return element2;
            }
            throw outOfBounds(index);
        }

        @Override
        public int size() {
            return 2;
        }
    }

    /** Immutable array-backed list for three or more elements. */
    private static class ListN<E> extends AbstractImmutableList<E> {
        private final Object[] elements;

        private ListN(Collection<E> elements) {
            this.elements = elements.toArray();
        }

        @SuppressWarnings("unchecked")
        @Override
        public E get(int index) {
            return (E) elements[index];
        }

        @Override
        public int size() {
            return elements.length;
        }
    }

    /**
     * Skeleton for the immutable list implementations: every mutator
     * throws {@link UnsupportedOperationException}; concrete subclasses
     * only provide {@link #get(int)} and {@link #size()}.
     */
    private abstract static class AbstractImmutableList<E> extends AbstractList<E>
            implements RandomAccess, Serializable {
        @Override
        public boolean add(E e) {
            throw uoe();
        }

        @Override
        public boolean remove(Object o) {
            throw uoe();
        }

        @Override
        public boolean addAll(Collection<? extends E> c) {
            throw uoe();
        }

        @Override
        public boolean removeAll(Collection<?> c) {
            throw uoe();
        }

        @Override
        public boolean retainAll(Collection<?> c) {
            throw uoe();
        }

        @Override
        public void clear() {
            throw uoe();
        }

        @Override
        public boolean removeIf(Predicate<? super E> filter) {
            throw uoe();
        }

        @Override
        public void replaceAll(UnaryOperator<E> operator) {
            throw uoe();
        }

        @Override
        public void sort(Comparator<? super E> c) {
            throw uoe();
        }

        @Override
        public Iterator<E> iterator() {
            return new Itr(0);
        }

        @Override
        public ListIterator<E> listIterator() {
            return new Itr(0);
        }

        @Override
        public ListIterator<E> listIterator(int index) {
            // index == size() is a valid cursor position for a ListIterator
            if (index < 0 || index > size()) {
                throw outOfBounds(index);
            }
            return new Itr(index);
        }

        @Override
        public List<E> subList(int fromIndex, int toIndex) {
            if (fromIndex < 0) {
                throw new IndexOutOfBoundsException("fromIndex = " + fromIndex);
            }
            if (toIndex > size()) {
                throw new IndexOutOfBoundsException("toIndex = " + toIndex);
            }
            if (fromIndex > toIndex) {
                throw new IllegalArgumentException("fromIndex(" + fromIndex + ") > toIndex(" + toIndex + ")");
            }
            return new SubList(fromIndex, toIndex);
        }

        protected IndexOutOfBoundsException outOfBounds(int index) {
            return new IndexOutOfBoundsException("Index: " + index + ", Size: " + size());
        }

        /** Immutable view of the range [fromIndex, toIndex) of the outer list. */
        private class SubList extends AbstractImmutableList<E> {
            private final int fromIndex;
            private final int toIndex;

            private SubList(int fromIndex, int toIndex) {
                this.fromIndex = fromIndex;
                this.toIndex = toIndex;
            }

            @Override
            public E get(int index) {
                // FIX: was 'index > size()', which accepted index == size()
                // and read one element past the sublist's upper bound
                if (index < 0 || index >= size()) {
                    throw outOfBounds(index);
                }
                return AbstractImmutableList.this.get(fromIndex + index);
            }

            @Override
            public int size() {
                return toIndex - fromIndex;
            }
        }

        /** Read-only ListIterator over the outer list. */
        private class Itr implements ListIterator<E> {
            int index;

            private Itr(int index) {
                this.index = index;
            }

            @Override
            public boolean hasNext() {
                return index < size();
            }

            @Override
            public E next() {
                // FIX: the Iterator contract requires NoSuchElementException
                // on exhaustion, not the backing list's IndexOutOfBoundsException
                if (index >= size()) {
                    throw new NoSuchElementException();
                }
                return get(index++);
            }

            @Override
            public boolean hasPrevious() {
                return index > 0;
            }

            @Override
            public E previous() {
                // FIX: same contract as next() — see above
                if (index <= 0) {
                    throw new NoSuchElementException();
                }
                return get(--index);
            }

            @Override
            public int nextIndex() {
                return index;
            }

            @Override
            public int previousIndex() {
                return index - 1;
            }

            @Override
            public void remove() {
                throw uoe();
            }

            @Override
            public void set(E e) {
                throw uoe();
            }

            @Override
            public void add(E e) {
                throw uoe();
            }
        }
    }

    /** Read-only {@link Properties}: every mutator throws. */
    private static class ROProperties extends Properties {
        private ROProperties(Properties props) {
            super();
            if (props != null) {
                // Do not use super.putAll, as it may delegate to put which throws an UnsupportedOperationException
                for (Map.Entry<Object, Object> e : props.entrySet()) {
                    super.put(e.getKey(), e.getValue());
                }
            }
        }

        @Override
        public Object put(Object key, Object value) {
            throw uoe();
        }

        @Override
        public Object remove(Object key) {
            throw uoe();
        }

        @Override
        public void putAll(Map<?, ?> t) {
            throw uoe();
        }

        @Override
        public void clear() {
            throw uoe();
        }

        @Override
        public void replaceAll(BiFunction<? super Object, ? super Object, ?> function) {
            throw uoe();
        }

        @Override
        public Object putIfAbsent(Object key, Object value) {
            throw uoe();
        }

        @Override
        public boolean remove(Object key, Object value) {
            throw uoe();
        }

        @Override
        public boolean replace(Object key, Object oldValue, Object newValue) {
            throw uoe();
        }

        @Override
        public Object replace(Object key, Object value) {
            throw uoe();
        }

        @Override
        public Object computeIfAbsent(Object key, Function<? super Object, ?> mappingFunction) {
            throw uoe();
        }

        @Override
        public Object computeIfPresent(Object key, BiFunction<? super Object, ? super Object, ?> remappingFunction) {
            throw uoe();
        }

        @Override
        public Object compute(Object key, BiFunction<? super Object, ? super Object, ?> remappingFunction) {
            throw uoe();
        }

        @Override
        public Object merge(Object key, Object value, BiFunction<? super Object, ? super Object, ?> remappingFunction) {
            throw uoe();
        }
    }

    /** Immutable one-entry map. */
    private static class Map1<K, V> extends AbstractImmutableMap<K, V> {
        private final Entry<K, V> entry;

        private Map1(K key, V value) {
            this.entry = new SimpleImmutableEntry<>(key, value);
        }

        @Override
        public Set<Entry<K, V>> entrySet() {
            return new AbstractImmutableSet<Entry<K, V>>() {
                @Override
                public Iterator<Entry<K, V>> iterator() {
                    return new Iterator<Entry<K, V>>() {
                        int index = 0;

                        @Override
                        public boolean hasNext() {
                            return index == 0;
                        }

                        @Override
                        public Entry<K, V> next() {
                            if (index++ == 0) {
                                return entry;
                            }
                            throw new NoSuchElementException();
                        }
                    };
                }

                @Override
                public int size() {
                    return 1;
                }
            };
        }
    }

    /** Immutable array-backed map for two or more entries. */
    private static class MapN<K, V> extends AbstractImmutableMap<K, V> {
        private final Object[] entries;

        private MapN(Map<K, V> map) {
            if (map != null) {
                entries = new Object[map.size()];
                int idx = 0;
                for (Entry<K, V> e : map.entrySet()) {
                    // copy eagerly so later mutation of the source is invisible
                    entries[idx++] = new SimpleImmutableEntry<>(e.getKey(), e.getValue());
                }
            } else {
                entries = new Object[0];
            }
        }

        @Override
        public Set<Entry<K, V>> entrySet() {
            return new AbstractImmutableSet<Entry<K, V>>() {
                @Override
                public Iterator<Entry<K, V>> iterator() {
                    return new Iterator<Entry<K, V>>() {
                        int index = 0;

                        @Override
                        public boolean hasNext() {
                            return index < entries.length;
                        }

                        @SuppressWarnings("unchecked")
                        @Override
                        public Entry<K, V> next() {
                            if (index < entries.length) {
                                return (Entry<K, V>) entries[index++];
                            }
                            throw new NoSuchElementException();
                        }
                    };
                }

                @Override
                public int size() {
                    return entries.length;
                }
            };
        }
    }

    /**
     * Skeleton for the immutable map implementations: every default
     * mutator from {@link Map} throws {@link UnsupportedOperationException}.
     */
    private abstract static class AbstractImmutableMap<K, V> extends AbstractMap<K, V> implements Serializable {
        @Override
        public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
            throw uoe();
        }

        @Override
        public V putIfAbsent(K key, V value) {
            throw uoe();
        }

        @Override
        public boolean remove(Object key, Object value) {
            throw uoe();
        }

        @Override
        public boolean replace(K key, V oldValue, V newValue) {
            throw uoe();
        }

        @Override
        public V replace(K key, V value) {
            throw uoe();
        }

        @Override
        public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
            throw uoe();
        }

        @Override
        public V computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
            throw uoe();
        }

        @Override
        public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
            throw uoe();
        }

        @Override
        public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
            throw uoe();
        }
    }

    /** Skeleton for the immutable set views: every mutator throws. */
    private abstract static class AbstractImmutableSet<E> extends AbstractSet<E> implements Serializable {
        @Override
        public boolean removeAll(Collection<?> c) {
            throw uoe();
        }

        @Override
        public boolean add(E e) {
            throw uoe();
        }

        @Override
        public boolean remove(Object o) {
            throw uoe();
        }

        @Override
        public boolean retainAll(Collection<?> c) {
            throw uoe();
        }

        @Override
        public void clear() {
            throw uoe();
        }

        @Override
        public boolean removeIf(Predicate<? super E> filter) {
            throw uoe();
        }
    }

    /** Single factory for the exception thrown by every mutator. */
    private static UnsupportedOperationException uoe() {
        return new UnsupportedOperationException();
    }
}
|
|
@ -23,7 +23,6 @@
|
|||
import java.io.Serializable;
|
||||
import java.io.StringWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -87,10 +86,8 @@ public XmlNodeImpl(
|
|||
this.namespaceUri = namespaceUri == null ? "" : namespaceUri;
|
||||
this.name = Objects.requireNonNull(name);
|
||||
this.value = value;
|
||||
this.attributes =
|
||||
attributes != null ? Collections.unmodifiableMap(new HashMap<>(attributes)) : Collections.emptyMap();
|
||||
this.children =
|
||||
children != null ? Collections.unmodifiableList(new ArrayList<>(children)) : Collections.emptyList();
|
||||
this.attributes = ImmutableCollections.copy(attributes);
|
||||
this.children = ImmutableCollections.copy(children);
|
||||
this.location = location;
|
||||
}
|
||||
|
||||
|
|
|
@ -485,11 +485,15 @@ private static class MapN<K, V> extends AbstractImmutableMap<K, V> {
|
|||
private final Object[] entries;
|
||||
|
||||
private MapN(Map<K, V> map) {
|
||||
entries = map != null
|
||||
? map.entrySet().stream()
|
||||
.map(e -> new SimpleImmutableEntry<>(e.getKey(), e.getValue()))
|
||||
.toArray()
|
||||
: new Object[0];
|
||||
if (map != null) {
|
||||
entries = new Object[map.size()];
|
||||
int idx = 0;
|
||||
for (Map.Entry<K, V> e : map.entrySet()) {
|
||||
entries[idx++] = new SimpleImmutableEntry<>(e.getKey(), e.getValue());
|
||||
}
|
||||
} else {
|
||||
entries = new Object[0];
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
Loading…
Reference in New Issue