Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

AzureIntegrationTests share one billing profile #1702

Closed
28 changes: 15 additions & 13 deletions .github/workflows/int-and-connected-test-run.yml
Original file line number Diff line number Diff line change
Expand Up @@ -77,15 +77,15 @@ jobs:
with:
path: ${{ env.CACHE_PATHS }}
key: ${{ runner.os }}-build-unit }}
- name: "Run unit tests and sonar scan via Gradle"
uses: broadinstitute/datarepo-actions/actions/[email protected]
with:
actions_subcommand: 'gradleinttest'
pgport: ${{ job.services.postgres.ports[5432] }}
test_to_run: 'check'
role_id: ${{ secrets.ROLE_ID }}
secret_id: ${{ secrets.SECRET_ID }}
sonar_token: ${{ secrets.SONAR_TOKEN }}
# - name: "Run unit tests and sonar scan via Gradle"
# uses: broadinstitute/datarepo-actions/actions/[email protected]
# with:
# actions_subcommand: 'gradleinttest'
# pgport: ${{ job.services.postgres.ports[5432] }}
# test_to_run: 'check'
# role_id: ${{ secrets.ROLE_ID }}
# secret_id: ${{ secrets.SECRET_ID }}
# sonar_token: ${{ secrets.SONAR_TOKEN }}
test_connected:
name: "Run connected tests"
outputs:
Expand Down Expand Up @@ -204,16 +204,18 @@ jobs:
env:
DESIRED_GITHASH: ${{ steps.configuration.outputs.git_hash }}
DEPLOYMENT_TYPE: 'api'
- name: "Run Test Runner smoke tests via Gradle"
uses: broadinstitute/datarepo-actions/actions/[email protected]
with:
actions_subcommand: 'gradletestrunnersmoketest'
# DO NOT MERGE.
# - name: "Run Test Runner smoke tests via Gradle"
# uses: broadinstitute/datarepo-actions/actions/[email protected]
# with:
# actions_subcommand: 'gradletestrunnersmoketest'
- name: "Run integration tests via Gradle"
uses: broadinstitute/datarepo-actions/actions/[email protected]
with:
actions_subcommand: 'gradleinttest'
pgport: ${{ job.services.postgres.ports[5432] }}
test_to_run: 'testIntegration'
test_filter: 'AzureIntegrationTest' ## DO NOT MERGE.
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I won't check this in, but PSA that this is a supported filter by way of this PR: broadinstitute/datarepo-actions#74

env:
AZURE_CREDENTIALS_APPLICATIONID: ${{ env.AZURE_CREDENTIALS_APPLICATIONID }}
AZURE_CREDENTIALS_HOMETENANTID: ${{ env.AZURE_CREDENTIALS_HOMETENANTID }}
Expand Down
4 changes: 3 additions & 1 deletion src/test/java/bio/terra/common/category/Integration.java
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
package bio.terra.common.category;

/** Integration test category. */
public interface Integration {}
public interface Integration {
  // JUnit 5 tag value for integration tests. Test classes opt in via @Tag(Integration.TAG),
  // letting the build select integration tests by tag (e.g. the workflow's test filter).
  String TAG = "bio.terra.common.category.Integration";
}
97 changes: 52 additions & 45 deletions src/test/java/bio/terra/integration/AzureIntegrationTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;

import bio.terra.app.model.AzureCloudResource;
Expand Down Expand Up @@ -117,28 +116,31 @@
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.HttpStatus;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.util.ResourceUtils;

@RunWith(SpringRunner.class)
@ExtendWith({SpringExtension.class, TestJobWatcherExtension.class})
@SpringBootTest
@ActiveProfiles({"google", "integrationtest"})
@AutoConfigureMockMvc
@Category(Integration.class)
public class AzureIntegrationTest extends UsersBase {
@Tag(Integration.TAG)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This annotation is necessary to use @BeforeAll and @AfterAll annotations on non-static methods.

class AzureIntegrationTest extends UsersBase {
okotsopoulos marked this conversation as resolved.
Show resolved Hide resolved
private static final Logger logger = LoggerFactory.getLogger(AzureIntegrationTest.class);

private static final String omopDatasetName = "it_dataset_omop";
Expand All @@ -151,7 +153,6 @@ public class AzureIntegrationTest extends UsersBase {
@Autowired private TestConfiguration testConfig;
@Autowired private AzureResourceConfiguration azureResourceConfiguration;
@Autowired private JsonLoader jsonLoader;
@Rule @Autowired public TestJobWatcher testWatcher;

private User steward;
private User admin;
Expand All @@ -167,14 +168,18 @@ public class AzureIntegrationTest extends UsersBase {
private GcsBlobIOTestUtility gcsBlobIOTestUtility;
private Set<String> storageAccounts;

@Before
public void setup() throws Exception {
// Runs once for the entire test class rather than before each test — valid on a non-static
// method only because the class uses @TestInstance(Lifecycle.PER_CLASS). Creates the single
// Azure billing profile shared by every test in this class.
@BeforeAll
public void createSharedBillingProfile() throws Exception {
super.setup(false);
// Voldemort is required by this test since the application is deployed with his user authz'ed
steward = steward("voldemort");
admin = admin("hermione");
// Reset server config for the steward up front (also done again in per-test teardown).
dataRepoFixtures.resetConfig(steward);
// Shared across all tests in the class; deleted once in the @AfterAll method.
profileId = dataRepoFixtures.createAzureBillingProfile(steward).getId();
}

@BeforeEach
public void testSetup() {
RequestRetryOptions retryOptions =
new RequestRetryOptions(
RetryPolicyType.EXPONENTIAL,
Expand All @@ -195,29 +200,41 @@ public void setup() throws Exception {
storageAccounts = new TreeSet<>();
}

@After
public void teardown() throws Exception {
@AfterEach
public void testTeardown() throws Exception {
if (releaseSnapshotId != null) {
snapshotIds.add(releaseSnapshotId);
}
logger.info(
"Teardown: trying to delete snapshots {}, dataset {}, billing profile {}",
snapshotIds,
datasetId,
profileId);
logger.info("Test teardown: trying to delete snapshots {}, dataset {}", snapshotIds, datasetId);

dataRepoFixtures.resetConfig(steward);

for (UUID snapshotAccessRequestId : snapshotAccessRequestIds) {
samFixtures.deleteSnapshotAccessRequest(steward, snapshotAccessRequestId);
}

for (UUID snapshotId : snapshotIds) {
dataRepoFixtures.deleteSnapshot(steward, snapshotId);
}
if (datasetId != null) {
dataRepoFixtures.deleteDataset(steward, datasetId);
}
for (String storageAccountName : storageAccounts) {
deleteCloudResources(storageAccountName);
}
if (azureBlobIOTestUtility != null) {
azureBlobIOTestUtility.teardown();
}
if (gcsBlobIOTestUtility != null) {
gcsBlobIOTestUtility.teardown();
}
if (dac != null) {
samFixtures.deleteGroup(steward, dac);
}
}

@AfterAll
public void deleteSharedBillingProfile() throws Exception {
logger.info("Test class teardown: trying to delete billing profile {}", profileId);
if (profileId != null) {
// TODO - https://broadworkbench.atlassian.net/browse/DCJ-228
// As we move towards running smoke and integration tests in BEEs, we would like to do so
Expand All @@ -231,19 +248,10 @@ public void teardown() throws Exception {
// rather than needing an admin to do it.
dataRepoFixtures.deleteProfileWithCloudResourceDelete(admin, profileId);
}
if (storageAccounts != null) {
storageAccounts.forEach(this::deleteCloudResources);
}
azureBlobIOTestUtility.teardown();
gcsBlobIOTestUtility.teardown();

if (dac != null) {
samFixtures.deleteGroup(steward, dac);
}
}

@Test
public void datasetsHappyPath() throws Exception {
void datasetsHappyPath() throws Exception {
// Note: this region should not be the same as the default region in the application deployment
// (eastus by default)
AzureRegion region = AzureRegion.SOUTH_CENTRAL_US;
Expand Down Expand Up @@ -350,7 +358,7 @@ public void datasetsHappyPath() throws Exception {
return found;
});

assertTrue("dataset was found in enumeration", metExpectation);
assertThat("dataset was found in enumeration", metExpectation);

// This should fail since it currently has dataset storage account within
assertThrows(AssertionError.class, () -> dataRepoFixtures.deleteProfile(steward, profileId));
Expand Down Expand Up @@ -451,7 +459,7 @@ private void populateOmopTable() throws Exception {
}

@Test
public void testSnapshotCreateFromRequest() throws Exception {
void testSnapshotCreateFromRequest() throws Exception {
populateOmopTable();
UUID snapshotRequestId = makeSnapshotAccessRequest().getId();
SnapshotSummaryModel snapshotSummaryByRequest = makeSnapshotFromRequest(snapshotRequestId);
Expand Down Expand Up @@ -511,7 +519,7 @@ private SnapshotSummaryModel makeSnapshotFromRequest(UUID requestSnapshotId) thr
}

@Test
public void testSnapshotBuilder() throws Exception {
void testSnapshotBuilder() throws Exception {
populateOmopTable();

var concept1 =
Expand Down Expand Up @@ -620,7 +628,7 @@ private void testRollupCountsWithCriteriaAndExpected(
}

@Test
public void datasetIngestFileHappyPath() throws Exception {
void datasetIngestFileHappyPath() throws Exception {
String blobName = "myBlob";
long fileSize = MIB / 10;
String sourceFileAzure = azureBlobIOTestUtility.uploadSourceFile(blobName, fileSize);
Expand Down Expand Up @@ -1470,7 +1478,7 @@ sourceFileAzure, getSourceStorageAccountPrimarySharedKey()))
}

@Test
public void testDatasetFileIngestLoadHistory() throws Exception {
void testDatasetFileIngestLoadHistory() throws Exception {
String blobName = "myBlob";
long fileSize = MIB / 10;
String sourceFile = azureBlobIOTestUtility.uploadSourceFile(blobName, fileSize);
Expand Down Expand Up @@ -1586,7 +1594,7 @@ sourceFile, getSourceStorageAccountPrimarySharedKey()))
}

@Test
public void testDatasetFileRefValidation() throws Exception {
void testDatasetFileRefValidation() throws Exception {
DatasetSummaryModel summaryModel =
dataRepoFixtures.createDataset(
steward, profileId, "dataset-ingest-azure-fileref.json", CloudPlatform.AZURE);
Expand Down Expand Up @@ -1716,7 +1724,7 @@ public void testDatasetFileRefValidation() throws Exception {
}

@Test
public void testRequiredColumnsIngest() throws Exception {
void testRequiredColumnsIngest() throws Exception {
DatasetSummaryModel summaryModel =
dataRepoFixtures.createDataset(
steward,
Expand Down Expand Up @@ -1780,16 +1788,16 @@ public void testRequiredColumnsIngest() throws Exception {
}

@Test
public void testDatasetCombinedIngest() throws Exception {
void testDatasetCombinedIngest() throws Exception {
testDatasetCombinedIngest(true);
}

@Test
public void testDatasetCombinedIngestFromApi() throws Exception {
void testDatasetCombinedIngestFromApi() throws Exception {
testDatasetCombinedIngest(false);
}

public void testDatasetCombinedIngest(boolean ingestFromFile) throws Exception {
void testDatasetCombinedIngest(boolean ingestFromFile) throws Exception {
DatasetSummaryModel summaryModel =
dataRepoFixtures.createDataset(
steward, profileId, "dataset-ingest-combined-azure.json", CloudPlatform.AZURE);
Expand Down Expand Up @@ -1831,8 +1839,7 @@ public void testDatasetCombinedIngest(boolean ingestFromFile) throws Exception {
dataRepoFixtures.assertCombinedIngestCorrect(ingestResponse, steward);
}

public void testMetadataArrayIngest(String arrayIngestTableName, Object records)
throws Exception {
void testMetadataArrayIngest(String arrayIngestTableName, Object records) throws Exception {
IngestRequestModel arrayIngestRequest =
new IngestRequestModel()
.ignoreUnknownValues(false)
Expand Down
43 changes: 6 additions & 37 deletions src/test/java/bio/terra/integration/TestJobWatcher.java
Original file line number Diff line number Diff line change
@@ -1,31 +1,17 @@
package bio.terra.integration;

import bio.terra.common.configuration.TestConfiguration;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.time.Instant;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.stringtemplate.v4.ST;

// This class can be used in a rule for integration test to decorate failures to make it easier to
// debug
/**
* This can be used as a Rule for JUnit 4 integration tests. For JUnit 5 integration tests, see
* TestJobWatcherExtension instead. It decorates failures with a link to GCP logs to make them
* easier to debug.
*/
@Component
public class TestJobWatcher extends TestWatcher {
private static final Logger logger = LoggerFactory.getLogger(TestJobWatcher.class);

private static final String QUERY_TEMPLATE =
"resource.type=\"k8s_container\"\n"
+ "resource.labels.project_id=\"broad-jade-integration\"\n"
+ "resource.labels.location=\"us-central1\"\n"
+ "resource.labels.cluster_name=\"integration-master\"\n"
+ "resource.labels.namespace_name=\"integration-<intNumber>\"\n"
+ "labels.k8s-pod/component=\"integration-<intNumber>-jade-datarepo-api";

private final TestConfiguration testConfig;

public TestJobWatcher(TestConfiguration testConfig) {
Expand All @@ -34,23 +20,6 @@ public TestJobWatcher(TestConfiguration testConfig) {

@Override
protected void failed(final Throwable e, final Description description) {
if (testConfig.getIntegrationServerNumber() != null) {
logger.error("See server log info at: {}", getStackdriverUrl());
}
}

private String getStackdriverUrl() {
String query =
URLEncoder.encode(
new ST(QUERY_TEMPLATE)
.add("intNumber", testConfig.getIntegrationServerNumber())
.render(),
StandardCharsets.UTF_8);
return "https://console.cloud.google.com/logs/query;"
+ query
+ ";cursorTimestamp="
+ Instant.now().minus(Duration.ofSeconds(30)).toString()
+ "?project="
+ testConfig.getGoogleProjectId();
TestJobWatcherUtils.emitLinkToGcpLogs(testConfig);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
package bio.terra.integration;

import static org.springframework.test.context.junit.jupiter.SpringExtension.getApplicationContext;

import bio.terra.common.configuration.TestConfiguration;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.TestWatcher;

/**
* This is an Extension for JUnit 5 integration tests. For JUnit 4 integration tests, use
* TestJobWatcher as a Rule instead. It decorates failures with a link to GCP logs to make them
* easier to debug.
*/
class TestJobWatcherExtension implements TestWatcher {
  @Override
  public void testFailed(ExtensionContext context, Throwable cause) {
    // On any test failure: look up the TestConfiguration bean in the Spring test context and
    // emit a link to the matching GCP logs so the failure is easier to debug.
    TestJobWatcherUtils.emitLinkToGcpLogs(
        getApplicationContext(context).getBean(TestConfiguration.class));
  }
}
Loading
Loading