// OpenSearch/plugins/repository-gcs/qa/google-cloud-storage/build.gradle


/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.RestIntegTestTask
import java.nio.file.Files
import java.security.KeyPair
import java.security.KeyPairGenerator
import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
apply plugin: 'elasticsearch.test.fixtures'
// TODO think about flattening qa:google-cloud-storage project into parent
dependencies {
testCompile project(path: ':plugins:repository-gcs')
}
restResources {
restApi {
includeCore '_common', 'snapshot', 'indices', 'index', 'bulk', 'count'
}
}
testFixtures.useFixture(':test:fixtures:gcs-fixture', 'gcs-fixture')
testFixtures.useFixture(':test:fixtures:gcs-fixture', 'gcs-fixture-third-party')
boolean useFixture = false
String gcsServiceAccount = System.getenv("google_storage_service_account")
String gcsBucket = System.getenv("google_storage_bucket")
String gcsBasePath = System.getenv("google_storage_base_path")
File serviceAccountFile = null
if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) {
serviceAccountFile = new File(project.buildDir, 'generated-resources/service_account_test.json')
gcsBucket = 'bucket'
gcsBasePath = 'integration_test'
useFixture = true
} else if (!gcsServiceAccount || !gcsBucket || !gcsBasePath) {
throw new IllegalArgumentException("not all of the options required to run tests against an external GCS service are present")
} else {
serviceAccountFile = new File(gcsServiceAccount)
}
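// Illustrative environment for running against a real GCS bucket (paths and names are examples only):
//   export google_storage_service_account=/path/to/service_account.json
//   export google_storage_bucket=my-test-bucket
//   export google_storage_base_path=integration_test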
def encodedCredentials = {
Base64.encoder.encodeToString(Files.readAllBytes(serviceAccountFile.toPath()))
}
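// Resolves the ephemeral host port that the fixture's container port 80 was mapped to;
// the 'test.fixtures.<name>.tcp.<port>' ext properties are populated by the test fixtures plugin.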
def fixtureAddress = { fixture ->
assert useFixture: 'closure should not be used without a fixture'
int ephemeralPort = project(':test:fixtures:gcs-fixture').postProcessFixture.ext."test.fixtures.${fixture}.tcp.80"
assert ephemeralPort > 0
'http://127.0.0.1:' + ephemeralPort
}
/** A service account file that points to the Google Cloud Storage service emulated by the fixture **/
task createServiceAccountFile() {
doLast {
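// Generate a throwaway 1024-bit RSA key pair; the fixture presumably only requires a
// credentials file that parses as a service account, so the key never needs to be accepted by Google.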
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA")
keyPairGenerator.initialize(1024)
KeyPair keyPair = keyPairGenerator.generateKeyPair()
String encodedKey = Base64.getEncoder().encodeToString(keyPair.private.getEncoded())
serviceAccountFile.parentFile.mkdirs()
serviceAccountFile.setText("{\n" +
' "type": "service_account",\n' +
' "project_id": "integration_test",\n' +
' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' +
' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' +
' "client_email": "integration_test@appspot.gserviceaccount.com",\n' +
' "client_id": "123456789101112130594"\n' +
'}', 'UTF-8')
}
}
task thirdPartyTest(type: Test) {
if (useFixture) {
thirdPartyTest.dependsOn createServiceAccountFile
nonInputProperties.systemProperty 'test.google.endpoint', "${-> fixtureAddress('gcs-fixture-third-party')}"
nonInputProperties.systemProperty 'test.google.tokenURI', "${-> fixtureAddress('gcs-fixture-third-party')}/o/oauth2/token"
gradle.taskGraph.whenReady {
if (it.hasTask(gcsThirdPartyTests)) {
throw new IllegalStateException("Tried to run third party tests but not all of the necessary environment variables " +
"'google_storage_service_account', 'google_storage_bucket', 'google_storage_base_path' are set.")
}
}
}
include '**/GoogleCloudStorageThirdPartyTests.class'
systemProperty 'tests.security.manager', false
systemProperty 'test.google.bucket', gcsBucket
nonInputProperties.systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + BuildParams.testSeed
nonInputProperties.systemProperty 'test.google.account', "${-> encodedCredentials.call()}"
}
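// Illustrative invocation against a real bucket (with the environment variables above set):
//   ./gradlew :plugins:repository-gcs:qa:google-cloud-storage:thirdPartyTest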
task gcsThirdPartyTests {
dependsOn check
}
check.dependsOn thirdPartyTest
integTest.mustRunAfter(thirdPartyTest)
/*
* We only use a small amount of data in these tests, which means that the resumable upload path is not tested. We add
* an additional test that forces the large blob threshold to be small to exercise the resumable upload path.
*/
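// The threshold itself is lowered via the 'es.repository_gcs.large_blob_threshold_byte_size'
// system property in the testClusters.largeBlobIntegTest block at the bottom of this file.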
task largeBlobIntegTest(type: RestIntegTestTask) {
mustRunAfter(thirdPartyTest)
}
check.dependsOn integTest
check.dependsOn largeBlobIntegTest
Map<String, Object> expansions = [
'bucket': gcsBucket,
'base_path': gcsBasePath + "_integration_tests"
]
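// These values are filtered into the REST test resources below; MavenFilteringHack performs
// Maven-style ${...} placeholder substitution (e.g. ${bucket} and ${base_path} in the test files).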
processTestResources {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
}
final Closure integTestConfiguration = {
dependsOn project(':plugins:repository-gcs').bundlePlugin
}
integTest integTestConfiguration
largeBlobIntegTest integTestConfiguration
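// Shared cluster configuration: installs the plugin bundle and stores the GCS credentials as a
// secure setting in the node keystore. IGNORE_VALUE keeps these environment-specific values out
// of the task's input fingerprint so they do not defeat up-to-date checks.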
final Closure testClustersConfiguration = {
plugin project(':plugins:repository-gcs').bundlePlugin.archiveFile
keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile, IGNORE_VALUE
if (useFixture) {
tasks.integTest.dependsOn createServiceAccountFile
/* Use a closure on the string to delay evaluation until tests are executed */
setting 'gcs.client.integration_test.endpoint', { "${-> fixtureAddress('gcs-fixture')}" }, IGNORE_VALUE
setting 'gcs.client.integration_test.token_uri', { "${-> fixtureAddress('gcs-fixture')}/o/oauth2/token" }, IGNORE_VALUE
} else {
println "Using an external service to test the repository-gcs plugin"
}
}
testClusters.integTest testClustersConfiguration
testClusters.largeBlobIntegTest testClustersConfiguration
testClusters.largeBlobIntegTest {
// force the resumable (large blob) upload path by setting the threshold small enough that the tests exercise it
systemProperty 'es.repository_gcs.large_blob_threshold_byte_size', '256'
}