Add azure integration tests (#15799)

This commit is contained in:
George Shiqi Wu 2024-02-01 09:18:49 -05:00 committed by GitHub
parent 5de39c6251
commit 50bae96e8b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 115 additions and 0 deletions

View File

@ -48,9 +48,11 @@ public abstract class AbstractCloudInputSourceParallelIndexTest extends Abstract
private static final String WIKIPEDIA_DATA_3 = "wikipedia_index_data3.json";
private static final String GOOGLE = "google";
private static final String AZURE = "azure";
private static final String AZURE_V2 = "azureStorage";
private static final String GOOGLE_PREFIX = "googlePrefix";
private static final String GOOGLE_BUCKET = "googleBucket";
private static final String AZURE_CONTAINER = "azureContainer";
private static final String AZURE_STORAGE_ACCOUNT = "azureAccount";
private static final Logger LOG = new Logger(AbstractCloudInputSourceParallelIndexTest.class);
String indexDatasource = "wikipedia_cloud_index_test_";
@ -106,6 +108,8 @@ public abstract class AbstractCloudInputSourceParallelIndexTest extends Abstract
{
if (GOOGLE.equals(inputSourceType)) {
return config.getProperty(GOOGLE_PREFIX);
} else if (AZURE_V2.equals(inputSourceType)) {
return config.getProperty(AZURE_CONTAINER) + "/" + config.getCloudPath();
} else {
return config.getCloudPath();
}
@ -117,6 +121,8 @@ public abstract class AbstractCloudInputSourceParallelIndexTest extends Abstract
return config.getProperty(GOOGLE_BUCKET);
} else if (AZURE.equals(inputSourceType)) {
return config.getProperty(AZURE_CONTAINER);
} else if (AZURE_V2.equals(inputSourceType)) {
return config.getProperty(AZURE_STORAGE_ACCOUNT);
} else {
return config.getCloudBucket();
}

View File

@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.testsEx.indexer;
import junitparams.Parameters;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.testsEx.categories.AzureDeepStorage;
import org.apache.druid.testsEx.config.DruidTestRunner;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.List;
/**
* IMPORTANT:
* To run this test, you must set the following env variables in the build environment -
* DRUID_CLOUD_PATH - path inside the container where the test data files will be uploaded
* <p>
* The AZURE account, key and container should be set in AZURE_ACCOUNT, AZURE_KEY and AZURE_CONTAINER respectively.
* <p>
* <a href="https://druid.apache.org/docs/latest/development/extensions-core/azure.html">Azure Deep Storage setup in druid</a>
*/
@RunWith(DruidTestRunner.class)
@Category(AzureDeepStorage.class)
public class ITAzureV2ParallelIndexTest extends AbstractAzureInputSourceParallelIndexTest
{
  // Input-source type string for the V2 ("azureStorage") Azure input source;
  // must match the type name the abstract test's config resolution expects.
  private static final String AZURE_V2_INPUT_SOURCE_TYPE = "azureStorage";

  /**
   * Runs the parallel batch-index ingestion test against the azureStorage (V2)
   * input source for each resource combination supplied by {@code resources}.
   *
   * @param azureInputSource pair of input-source property name and its values
   * @throws Exception if ingestion or verification fails
   */
  @Test
  @Parameters(method = "resources")
  public void testAzureIndexData(Pair<String, List<?>> azureInputSource) throws Exception
  {
    // Both segment-handoff flags disabled for this run, matching the original test.
    final Pair<Boolean, Boolean> segmentAvailabilityFlags = new Pair<>(false, false);
    doTest(azureInputSource, segmentAvailabilityFlags, AZURE_V2_INPUT_SOURCE_TYPE);
  }
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.testsEx.msq;
import junitparams.Parameters;
import junitparams.naming.TestCaseName;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.testsEx.categories.AzureDeepStorage;
import org.apache.druid.testsEx.config.DruidTestRunner;
import org.apache.druid.testsEx.indexer.AbstractAzureInputSourceParallelIndexTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.List;
/**
* IMPORTANT:
* To run this test, you must set the following env variables in the build environment:
* <p>
* The AZURE account, key and container should be set in AZURE_ACCOUNT, AZURE_KEY and AZURE_CONTAINER respectively.
* <p>
* <a href="https://druid.apache.org/docs/latest/development/extensions-core/azure.html">Azure Deep Storage setup in druid</a>
*/
@RunWith(DruidTestRunner.class)
@Category(AzureDeepStorage.class)
public class ITAzureV2SQLBasedIngestionTest extends AbstractAzureInputSourceParallelIndexTest
{
  // SQL spec used to drive the MSQ-based cloud ingestion.
  private static final String CLOUD_INGEST_SQL = "/multi-stage-query/wikipedia_cloud_index_msq.sql";
  // Queries run after ingestion to verify the indexed data.
  private static final String INDEX_QUERIES_FILE = "/multi-stage-query/wikipedia_index_queries.json";
  // Input-source type string for the V2 ("azureStorage") Azure input source.
  private static final String AZURE_V2_INPUT_SOURCE_TYPE = "azureStorage";

  /**
   * Runs SQL-based (MSQ) batch ingestion against the azureStorage (V2) input
   * source for each resource combination supplied by {@code resources}, then
   * verifies the ingested data with the configured query file.
   *
   * @param azureStorageInputSource pair of input-source property name and its values
   */
  @Test
  @Parameters(method = "resources")
  @TestCaseName("Test_{index} ({0})")
  public void testSQLBasedBatchIngestion(Pair<String, List<?>> azureStorageInputSource)
  {
    doMSQTest(azureStorageInputSource, CLOUD_INGEST_SQL, INDEX_QUERIES_FILE, AZURE_V2_INPUT_SOURCE_TYPE);
  }
}