Move Streams.copy into elasticsearch-core and make a multi-release jar (#29322)

* Move Streams.copy into elasticsearch-core and make a multi-release jar

This moves the method `Streams.copy(InputStream in, OutputStream out)` into the
`elasticsearch-core` project (inside the `o.e.core.internal.io` package). It
also turns `elasticsearch-core` into a multi-release jar, in which the Java 9
variant of this class uses `InputStream#transferTo`.

This is a follow-up to
https://github.com/elastic/elasticsearch/pull/29300#discussion_r178147495
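
For context, a minimal sketch of what a caller of the relocated method looks like after this change (the class name and sample data below are illustrative, not part of the commit):

```java
import org.elasticsearch.core.internal.io.Streams;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class StreamsCopyExample {
    public static void main(String[] args) throws IOException {
        byte[] data = "example content".getBytes(StandardCharsets.UTF_8);
        ByteArrayInputStream in = new ByteArrayInputStream(data);
        ByteArrayOutputStream out = new ByteArrayOutputStream(data.length);
        // copy() closes both streams itself, so no try-with-resources wrapper is needed.
        long copied = Streams.copy(in, out);
        System.out.println("copied " + copied + " bytes");
    }
}
```

On JDK 8 this resolves to the base class with the buffered copy loop; on JDK 9 or newer the class packaged under `META-INF/versions/9`, which delegates to `InputStream#transferTo`, is loaded instead.
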
Lee Hinman 2018-04-06 11:07:20 -06:00 committed by GitHub
parent a93c942927
commit a07ba9e400
17 changed files with 279 additions and 56 deletions

View File

@ -26,6 +26,45 @@ apply plugin: 'nebula.maven-scm'
archivesBaseName = 'elasticsearch-core'
// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 9 so we do not include this source set in our IDEs
if (!isEclipse && !isIdea) {
  sourceSets {
    java9 {
      java {
        srcDirs = ['src/main/java9']
      }
    }
  }

  configurations {
    java9Compile.extendsFrom(compile)
  }

  dependencies {
    java9Compile sourceSets.main.output
  }

  compileJava9Java {
    sourceCompatibility = 9
    targetCompatibility = 9
  }

  /* Enable this when forbiddenapis was updated to 2.6.
   * See: https://github.com/elastic/elasticsearch/issues/29292
  forbiddenApisJava9 {
    targetCompatibility = 9
  }
  */

  jar {
    metaInf {
      into 'versions/9'
      from sourceSets.java9.output
    }
    manifest.attributes('Multi-Release': 'true')
  }
}
publishing {
publications {
nebula {
@ -39,6 +78,10 @@ dependencies {
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
if (!isEclipse && !isIdea) {
java9Compile sourceSets.main.output
}
if (isEclipse == false || project.path == ":libs:elasticsearch-core-tests") {
testCompile("org.elasticsearch.test:framework:${version}") {
exclude group: 'org.elasticsearch', module: 'elasticsearch-core'
@ -66,14 +109,14 @@ if (isEclipse) {
}
thirdPartyAudit.excludes = [
// from log4j
'org/osgi/framework/AdaptPermission',
'org/osgi/framework/AdminPermission',
'org/osgi/framework/Bundle',
'org/osgi/framework/BundleActivator',
'org/osgi/framework/BundleContext',
'org/osgi/framework/BundleEvent',
'org/osgi/framework/SynchronousBundleListener',
'org/osgi/framework/wiring/BundleWire',
'org/osgi/framework/wiring/BundleWiring'
]
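
The `jar` block in this build change copies the Java 9 classes into `META-INF/versions/9` and sets the `Multi-Release: true` manifest attribute. A hedged sketch of how the resulting packaging could be spot-checked (the jar path is a placeholder, not something defined by this commit):

```java
import java.io.File;
import java.io.IOException;
import java.util.jar.JarFile;

public class MultiReleaseCheck {
    public static void main(String[] args) throws IOException {
        // Placeholder path; point this at the built elasticsearch-core jar.
        try (JarFile jar = new JarFile(new File("elasticsearch-core.jar"))) {
            // The jar {} block above sets this manifest attribute.
            String multiRelease = jar.getManifest().getMainAttributes().getValue("Multi-Release");
            System.out.println("Multi-Release: " + multiRelease); // expected: true
            // The Java 9 variant of Streams is copied under META-INF/versions/9.
            System.out.println("versioned Streams present: "
                + (jar.getEntry("META-INF/versions/9/org/elasticsearch/core/internal/io/Streams.class") != null));
        }
    }
}
```
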

View File

@ -41,45 +41,73 @@ public final class IOUtils {
}
/**
* Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are ignored. After everything is closed, the
* method either throws the first exception it hit while closing, or completes normally if there were no exceptions.
* Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are
* ignored. After everything is closed, the method either throws the first exception it hit
* while closing with other exceptions added as suppressed, or completes normally if there were
* no exceptions.
*
* @param objects objects to close
*/
public static void close(final Closeable... objects) throws IOException {
close(Arrays.asList(objects));
close(null, Arrays.asList(objects));
}
/**
* Closes all given {@link Closeable}s.
* Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are
* ignored. After everything is closed, the method adds any exceptions as suppressed to the
* original exception, or throws the first exception it hit if {@code Exception} is null. If
* no exceptions are encountered and the passed in exception is null, it completes normally.
*
* @param objects objects to close
*/
public static void close(final Exception e, final Closeable... objects) throws IOException {
close(e, Arrays.asList(objects));
}
/**
* Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are
* ignored. After everything is closed, the method either throws the first exception it hit
* while closing with other exceptions added as suppressed, or completes normally if there were
* no exceptions.
*
* @param objects objects to close
*/
public static void close(final Iterable<? extends Closeable> objects) throws IOException {
close(null, objects);
}
/**
* Closes all given {@link Closeable}s. If a non-null exception is passed in, or closing a
* stream causes an exception, throws the exception with other {@link RuntimeException} or
* {@link IOException} exceptions added as suppressed.
*
* @param ex existing Exception to add exceptions occurring during close to
* @param objects objects to close
*
* @see #close(Closeable...)
*/
public static void close(final Iterable<? extends Closeable> objects) throws IOException {
Exception ex = null;
public static void close(final Exception ex, final Iterable<? extends Closeable> objects) throws IOException {
Exception firstException = ex;
for (final Closeable object : objects) {
try {
if (object != null) {
object.close();
}
} catch (final IOException | RuntimeException e) {
if (ex == null) {
ex = e;
if (firstException == null) {
firstException = e;
} else {
ex.addSuppressed(e);
firstException.addSuppressed(e);
}
}
}
if (ex != null) {
if (ex instanceof IOException) {
throw (IOException) ex;
if (firstException != null) {
if (firstException instanceof IOException) {
throw (IOException) firstException;
} else {
// since we only assigned an IOException or a RuntimeException to ex above, in this case ex must be a RuntimeException
throw (RuntimeException) ex;
throw (RuntimeException) firstException;
}
}
}

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.core.internal.io;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Objects;

/**
 * Simple utility methods for file and stream copying.
 * All copy methods use a block size of 8192 bytes,
 * and close all affected streams when done.
 * <p>
 * Mainly for use within the framework,
 * but also useful for application code.
 */
public class Streams {

    /**
     * Copy the contents of the given InputStream to the given OutputStream.
     * Closes both streams when done.
     *
     * @param in the stream to copy from
     * @param out the stream to copy to
     * @return the number of bytes copied
     * @throws IOException in case of I/O errors
     */
    public static long copy(final InputStream in, final OutputStream out) throws IOException {
        Objects.requireNonNull(in, "No InputStream specified");
        Objects.requireNonNull(out, "No OutputStream specified");
        final byte[] buffer = new byte[8192];
        Exception err = null;
        try {
            long byteCount = 0;
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                out.write(buffer, 0, bytesRead);
                byteCount += bytesRead;
            }
            out.flush();
            return byteCount;
        } catch (IOException | RuntimeException e) {
            err = e;
            throw e;
        } finally {
            IOUtils.close(err, in, out);
        }
    }
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.core.internal.io;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/**
 * Simple utility methods for file and stream copying.
 * All copy methods close all affected streams when done.
 * <p>
 * Mainly for use within the framework,
 * but also useful for application code.
 */
public abstract class Streams {

    /**
     * Copy the contents of the given InputStream to the given OutputStream.
     * Closes both streams when done.
     *
     * @param in the stream to copy from
     * @param out the stream to copy to
     * @return the number of bytes copied
     * @throws IOException in case of I/O errors
     */
    public static long copy(final InputStream in, final OutputStream out) throws IOException {
        Exception err = null;
        try {
            final long byteCount = in.transferTo(out);
            out.flush();
            return byteCount;
        } catch (IOException | RuntimeException e) {
            err = e;
            throw e;
        } finally {
            IOUtils.close(err, in, out);
        }
    }
}

View File

@ -0,0 +1,42 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.core.internal.io;

import org.elasticsearch.test.ESTestCase;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import static org.hamcrest.Matchers.equalTo;

public class StreamsTests extends ESTestCase {
    public void testCopyFromInputStream() throws IOException {
        byte[] content = "content".getBytes(StandardCharsets.UTF_8);
        ByteArrayInputStream in = new ByteArrayInputStream(content);
        ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
        long count = Streams.copy(in, out);
        assertThat(count, equalTo((long) content.length));
        assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));
    }
}

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.Streams;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

View File

@ -22,7 +22,7 @@ import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.elasticsearch.repositories.gcs.GoogleCloudStorageTestServer.Response;

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.core.internal.io.Streams;
import java.io.BufferedInputStream;
import java.io.FileNotFoundException;
@ -128,7 +128,7 @@ public class FsBlobContainer extends AbstractBlobContainer {
}
final Path file = path.resolve(blobName);
try (OutputStream outputStream = Files.newOutputStream(file, StandardOpenOption.CREATE_NEW)) {
Streams.copy(inputStream, outputStream, new byte[blobStore.bufferSizeInBytes()]);
Streams.copy(inputStream, outputStream);
}
IOUtils.fsync(file, false);
IOUtils.fsync(path, true);

View File

@ -21,11 +21,11 @@ package org.elasticsearch.common.compress;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.Streams;
import java.io.IOException;
import java.util.Objects;

View File

@ -38,13 +38,13 @@ import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.CancellableThreads;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.RecoveryEngineException;
import org.elasticsearch.index.seqno.LocalCheckpointTracker;

View File

@ -55,7 +55,6 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.NotXContentException;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -75,6 +74,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException;

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.store.ByteArrayIndexInput;
@ -39,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.gateway.CorruptStateException;
import java.io.ByteArrayOutputStream;

View File

@ -27,7 +27,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.path.PathTrie;
@ -35,6 +34,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.usage.UsageService;
@ -51,7 +51,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;

View File

@ -19,10 +19,9 @@
package org.elasticsearch.tasks;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
@ -38,13 +37,12 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.Streams;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringReader;
@ -32,7 +31,6 @@ import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static org.elasticsearch.common.io.Streams.copy;
import static org.elasticsearch.common.io.Streams.copyToString;
import static org.hamcrest.Matchers.equalTo;
@ -40,20 +38,11 @@ import static org.hamcrest.Matchers.equalTo;
* Unit tests for {@link org.elasticsearch.common.io.Streams}.
*/
public class StreamsTests extends ESTestCase {
public void testCopyFromInputStream() throws IOException {
byte[] content = "content".getBytes(StandardCharsets.UTF_8);
ByteArrayInputStream in = new ByteArrayInputStream(content);
ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
long count = copy(in, out);
assertThat(count, equalTo((long) content.length));
assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));
}
public void testCopyFromByteArray() throws IOException {
byte[] content = "content".getBytes(StandardCharsets.UTF_8);
ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
copy(content, out);
Streams.copy(content, out);
assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));
}
@ -61,7 +50,7 @@ public class StreamsTests extends ESTestCase {
String content = "content";
StringReader in = new StringReader(content);
StringWriter out = new StringWriter();
int count = copy(in, out);
int count = Streams.copy(in, out);
assertThat(content.length(), equalTo(count));
assertThat(out.toString(), equalTo(content));
}
@ -69,7 +58,7 @@ public class StreamsTests extends ESTestCase {
public void testCopyFromString() throws IOException {
String content = "content";
StringWriter out = new StringWriter();
copy(content, out);
Streams.copy(content, out);
assertThat(out.toString(), equalTo(content));
}

View File

@ -42,12 +42,12 @@ import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
import org.elasticsearch.common.geo.builders.PointBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.test;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.core.internal.io.Streams;
import java.io.FileNotFoundException;
import java.io.IOException;
@ -36,7 +36,7 @@ public class StreamsUtils {
if (is == null) {
throw new FileNotFoundException("Resource [" + path + "] not found in classpath with class loader [" + classLoader + "]");
}
return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
return org.elasticsearch.common.io.Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
}
public static String copyToStringFromClasspath(String path) throws IOException {
@ -44,7 +44,7 @@ public class StreamsUtils {
if (is == null) {
throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
}
return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
return org.elasticsearch.common.io.Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
}
public static byte[] copyToBytesFromClasspath(String path) throws IOException {