Update container creation in AzureTestUtil.java (#13911)

1. Handling deletion/creation of the container created during a previously run test in AzureTestUtil.java.
2. Adding/updating log messages and comments in the Azure and GCS deep storage tests.
abhagraw 2023-03-16 11:04:43 +05:30 committed by GitHub
parent 65a663adbb
commit c7d864d3bc
7 changed files with 40 additions and 13 deletions


@@ -48,7 +48,7 @@ env:
  SEGMENT_DOWNLOAD_TIMEOUT_MINS: 5
jobs:
-  test: # Github job that runs a given revised/new IT against retrieved cached druid docker image
+  test: # GitHub job that runs a given revised/new IT against retrieved cached druid docker image
    name: ${{ inputs.it }} integration test (Compile=jdk${{ inputs.build_jdk }}, Run=jdk${{ inputs.runtime_jdk }}, Indexer=${{ inputs.use_indexer }})
    runs-on: ubuntu-22.04
    steps:


@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# Github workflow that runs revised/new ITs
+# GitHub workflow that runs revised/new ITs
on:
  workflow_call:
  workflow_dispatch:


@@ -42,6 +42,7 @@ public class AbstractAzureInputSourceParallelIndexTest extends AbstractCloudInpu
  public static void uploadDataFilesToAzure()
  {
    try {
+      LOG.info("Uploading files to Azure");
      azure = new AzureTestUtil();
      // Creating a container with name set in AZURE_CONTAINER env variable.
      azure.createStorageContainer();


@@ -31,15 +31,13 @@ import java.util.List;
/**
 * IMPORTANT:
- * To run this test, you must:
- * 1) Set the bucket and path for your data. This can be done by setting -Ddruid.test.config.cloudBucket and
- *    -Ddruid.test.config.cloudPath or setting "cloud_bucket" and "cloud_path" in the config file.
- * 2) Copy wikipedia_index_data1.json, wikipedia_index_data2.json, and wikipedia_index_data3.json
- *    located in integration-tests/src/test/resources/data/batch_index/json to your GCS at the location set in step 1.
- * 3) Provide -Doverride.config.path=<PATH_TO_FILE> with gcs configs set. See
- *    integration-tests/docker/environment-configs/override-examples/gcs for env vars to provide.
- * 4) Provide -Dresource.file.dir.path=<PATH_TO_FOLDER> with folder that contains GOOGLE_APPLICATION_CREDENTIALS file
+ * To run this test, you must set the following env variables in the build environment -
+ * GOOGLE_PREFIX - path inside the bucket where the test data files will be uploaded
+ * GOOGLE_BUCKET - Google cloud bucket name
+ * GOOGLE_APPLICATION_CREDENTIALS - path to the json file containing google cloud credentials
+ * <a href="https://druid.apache.org/docs/latest/development/extensions-core/google.html">Google Cloud Storage setup in druid</a>
 */
@RunWith(DruidTestRunner.class)
@Category(GcsDeepStorage.class)
public class ITGcsToGcsParallelIndexTest extends AbstractGcsInputSourceParallelIndexTest
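
Editor's note: the new javadoc above swaps system-property configuration for env variables. For illustration only (not part of this commit), here is a minimal sketch of how those variables could drive the upload step using the standard google-cloud-storage client; the class name and local file path are hypothetical, and GOOGLE_APPLICATION_CREDENTIALS is picked up automatically by the client library:

import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class GcsUploadSketch
{
  public static void main(String[] args) throws IOException
  {
    String bucket = System.getenv("GOOGLE_BUCKET");  // Google cloud bucket name, from the build environment
    String prefix = System.getenv("GOOGLE_PREFIX");  // path inside the bucket for the test data
    // Credentials come from GOOGLE_APPLICATION_CREDENTIALS; no explicit wiring is needed.
    Storage storage = StorageOptions.getDefaultInstance().getService();
    String file = "wikipedia_index_data1.json";      // hypothetical local copy of the test data
    storage.create(
        BlobInfo.newBuilder(bucket, prefix + "/" + file).build(),
        Files.readAllBytes(Paths.get(file))
    );
  }
}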


@@ -22,7 +22,7 @@ package org.apache.druid.testsEx.msq;
import junitparams.Parameters;
import junitparams.naming.TestCaseName;
import org.apache.druid.java.util.common.Pair;
-import org.apache.druid.testsEx.categories.S3DeepStorage;
+import org.apache.druid.testsEx.categories.AzureDeepStorage;
import org.apache.druid.testsEx.config.DruidTestRunner;
import org.apache.druid.testsEx.indexer.AbstractAzureInputSourceParallelIndexTest;
import org.junit.Test;
@@ -41,7 +41,7 @@ import java.util.List;
*/
@RunWith(DruidTestRunner.class)
-@Category(S3DeepStorage.class)
+@Category(AzureDeepStorage.class)
public class ITAzureSQLBasedIngestionTest extends AbstractAzureInputSourceParallelIndexTest
{
  private static final String CLOUD_INGEST_SQL = "/multi-stage-query/wikipedia_cloud_index_msq.sql";
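
Editor's note on the category fix above (not part of this commit): JUnit 4 selects tests by matching @Category annotations against a suite's include filter, so the old S3DeepStorage tag would have routed this Azure test into the S3 group. A generic sketch of that mechanism, assuming a plain JUnit 4 Categories suite rather than Druid's actual runner wiring:

import org.junit.experimental.categories.Categories;
import org.junit.experimental.categories.Categories.IncludeCategory;
import org.junit.runner.RunWith;
import org.junit.runners.Suite.SuiteClasses;

// Runs only the tests tagged @Category(AzureDeepStorage.class); with the old
// S3DeepStorage tag, ITAzureSQLBasedIngestionTest would not be picked up here.
@RunWith(Categories.class)
@IncludeCategory(AzureDeepStorage.class)
@SuiteClasses({ITAzureSQLBasedIngestionTest.class})
public class AzureDeepStorageSuiteSketch
{
}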


@@ -31,6 +31,15 @@ import org.junit.runner.RunWith;
import java.util.List;
+/**
+ * IMPORTANT:
+ * To run this test, you must set the following env variables in the build environment -
+ * GOOGLE_PREFIX - path inside the bucket where the test data files will be uploaded
+ * GOOGLE_BUCKET - Google cloud bucket name
+ * GOOGLE_APPLICATION_CREDENTIALS - path to the json file containing google cloud credentials
+ * <a href="https://druid.apache.org/docs/latest/development/extensions-core/google.html">Google Cloud Storage setup in druid</a>
+ */
@RunWith(DruidTestRunner.class)
@Category(GcsDeepStorage.class)
public class ITGcsSQLBasedIngestionTest extends AbstractGcsInputSourceParallelIndexTest
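
Editor's note: a hypothetical companion check (not in this commit) that fails fast with a readable message when the env variables documented above are missing, instead of surfacing later as an opaque client error:

  // Hypothetical pre-flight check for the env variables listed in the javadoc above.
  public static void verifyGcsEnv()
  {
    for (String var : new String[]{"GOOGLE_PREFIX", "GOOGLE_BUCKET", "GOOGLE_APPLICATION_CREDENTIALS"}) {
      String value = System.getenv(var);
      if (value == null || value.isEmpty()) {
        throw new IllegalStateException("Required env variable " + var + " is not set; see the class javadoc.");
      }
    }
  }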


@@ -25,6 +25,7 @@ import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudBlockBlob;
import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.testing.utils.ITRetryUtil;
import java.io.File;
import java.io.IOException;
@@ -83,9 +84,26 @@ public class AzureTestUtil
  public void createStorageContainer() throws URISyntaxException, StorageException
  {
+    LOG.info("Creating azure container " + AZURE_CONTAINER);
    CloudBlobContainer container = azureStorageClient.getContainerReference(AZURE_CONTAINER);
-    // Create the container if it does not exist.
-    container.createIfNotExists();
+    // From the azure documentation -
+    // When a container is deleted, a container with the same name can't be created for at least 30 seconds.
+    // The container might not be available for more than 30 seconds if the service is still processing the request.
+    // While the container is being deleted, attempts to create a container of the same name fail with status
+    // code 409 (Conflict). The service indicates that the container is being deleted.
+    // All other operations, including operations on any blobs under the container,
+    // fail with status code 404 (Not Found) while the container is being deleted.
+    ITRetryUtil.retryUntil(
+        () -> container.createIfNotExists(),
+        true,
+        10000,
+        13,
+        "Create Azure container : " + AZURE_CONTAINER + " "
+    );
+    LOG.info("Azure container " + AZURE_CONTAINER + " created");
  }

  public void deleteStorageContainer() throws URISyntaxException, StorageException
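
Editor's note: at 13 attempts with a 10,000 ms delay, the retry above can wait roughly 130 seconds, comfortably covering the documented 30-second deletion window. A plain-Java sketch of the retry-until behavior the ITRetryUtil call is assumed to provide (the util's exact semantics are not shown in this diff):

import java.util.concurrent.Callable;

public class RetrySketch
{
  // Retry a boolean task until it returns true or the attempts are exhausted.
  static void retryUntilTrue(Callable<Boolean> task, long delayMillis, int retryCount, String taskMessage)
      throws InterruptedException
  {
    for (int attempt = 1; attempt <= retryCount; attempt++) {
      try {
        if (Boolean.TRUE.equals(task.call())) {
          return; // e.g. createIfNotExists() succeeded once the old container was fully gone
        }
      }
      catch (Exception e) {
        // A 409 (Conflict) thrown while the old container is still being deleted lands here.
      }
      Thread.sleep(delayMillis);
    }
    throw new IllegalStateException("Max retries exceeded for task: " + taskMessage);
  }
}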
@@ -109,6 +127,7 @@ public class AzureTestUtil
    // Create or overwrite the "myimage.jpg" blob with contents from a local file.
    File source = new File(filePath);
    CloudBlockBlob blob = container.getBlockBlobReference(DRUID_CLOUD_PATH + '/' + source.getName());
+    LOG.info("Uploading file " + DRUID_CLOUD_PATH + '/' + source.getName() + " in azure container " + AZURE_CONTAINER);
    blob.upload(Files.newInputStream(source.toPath()), source.length());
  }
}
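
Editor's note: a hypothetical verification helper (not part of this commit), assuming the same legacy com.microsoft.azure.storage SDK and the LOG field that AzureTestUtil already uses; listing the container's blobs is one simple way to confirm an upload landed:

import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.ListBlobItem;

  // Log the URI of every blob in the container, e.g. after the upload method above returns.
  public static void listUploadedBlobs(CloudBlobContainer container)
  {
    for (ListBlobItem item : container.listBlobs()) {
      LOG.info("Found blob: " + item.getUri());
    }
  }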