From aa6877f6011c3c9accb76068acf93b3a8200cc89 Mon Sep 17 00:00:00 2001
From: Jorge Lopez <49923512+jorg3lopez@users.noreply.github.com>
Date: Wed, 3 Jan 2024 10:22:11 -0800
Subject: [PATCH] Data Access Layer - Database (#707)

* Initial commit of docker compose and connection
* Autowiring
* Adding comments
* Re-adding inject
* return conn option
* Added db inserts and refactored naming
* wrapper class for DriverManager and conn
* singleton
* EtorSqlDriverManager registered in app. context
* Adding to tests
* minor refactoring + getConnection unhappy path unit test
* no synchronization, keep it simple
* closeConnection unit test unhappy path
* refactoring changes, delete try catch
* commented out failing tests
* all tests passed
  Mock(Connection) substituted for Connection mockConnection
* deleted logger.info that was used for testing
* major refactoring of the PostgresDao, all tests pass
* try catch with resources
* Fixing issue with e2e tests
* Fixing linter
* exclude EtorSqlDriverManager
* Adding env props
* Added upsertMetadata sqlException unit test - test is passing as expected
* WIP: Adding select functionality to retrieve metadata
* refactoring of unit test
* refactoring upsertMetadata unhappy path unit test
* env vars for database - all mocked
* ternaries for properties object - all tests pass
* @Override annotation for fetchMetadata
* refactoring: ApplicationContext if/else for file or db
  Changes to EtorDomainRegistration, adding uniqueId
* Adding mvp select feature and fixing domain registration to allow both local file sender and the database to start
* Adding tests and refactoring pgDao
* Fixing lint issues
* testing terraform
* Fixing sku
* Fixing pass
* removing bad value
* Adding missing values
* tweaking terraform
* further tf tweaks
* adding ignore
* Updating db name for app
* updating auth
* Fixing oops
* Adding AD auth
* Fixing error
* Resolving http issue
* Test getting a token to read/write from the deployed database
* Change the db name
* Add a bunch of logging
* Register the db implementations when not running locally
* Added TODO to figure out the right token request URL
* Adding default to url
* Fixing errors
* Trying logger tweak
* Updating ssl options
* Updating terraform
* Fixing formatting of tf file
* Updating deploy for internal
* Fixing needy terraform
* Need space from terraform
* Generifying terraform deploy step
* Broadening permissions for db
* Tweaking terraform
* Adding Security access setting default to true
* Firewall rules for db
* Removing dev-specific firewalls
* Adding dual connection options
* 672: throw PartnerMetadataException instead of RuntimeException when metadata cannot be saved
* 672: Cleaned up exception chain and properly handled no results found with the Optional
* 672: Use timestamptz to store the date and time for a metadata event
* 672: Moved Azure credentialing process to the AzureClient so it can be mocked for the DB tests
* Fixing terraform formatting
* Fixing azure owner issue
* Fixing Azure table issue
* Removing alter table statement as this breaks the deploy
* Re-adding alter statement
* Reverting commits
* Fixing merge conflicts
* Formatting
* adding savePartnerMetadata() call
* deleted partnerMetadataStorage.saveMetadata() used in testing
* add persistence to postgres container and get postgres:16 image
* Update docker-compose.postgres.yml
* Update docker-compose.postgres.yml
* TODO comment reminders to delete code used for testing
* refactored code to align with main
* registering Azure Client
* application fails when
FilePartnerMetadataStorage is registered * Reverting EtorDomainRegistration changes to make app run * Fixing failing tests * Adding java-docs * Formatting * Fixing merge issues * Removing comments and logging and updating domain registration * Moving azure client registration * Fix linting --------- Co-authored-by: Jeff Crichlake Co-authored-by: tjohnson7021 <86614374+tjohnson7021@users.noreply.github.com> Co-authored-by: halprin Co-authored-by: jcrichlake <145698165+jcrichlake@users.noreply.github.com> --- .../workflows/terraform-deploy_reusable.yml | 12 ++ build.gradle | 1 + docker-compose.postgres.yml | 17 +++ .../etor/EtorDomainRegistration.java | 20 ++- .../external/azure/AzureClient.java | 8 + .../DatabasePartnerMetadataStorage.java | 32 +++- .../database/EtorSqlDriverManager.java | 24 +++ .../external/database/PostgresDao.java | 129 ++++++++++++++++ .../external/database/PostgresDaoTest.groovy | 140 ++++++++++++++++++ generate_env.sh | 10 +- operations/environments/dev/outputs.tf | 5 + operations/environments/internal/outputs.tf | 4 + operations/environments/pr/outputs.tf | 4 + operations/environments/prod/outputs.tf | 4 + operations/environments/staging/outputs.tf | 4 + operations/template/app.tf | 5 + operations/template/db.tf | 51 +++++++ operations/template/key.tf | 2 +- operations/template/main.tf | 2 + operations/template/outputs.tf | 4 + shared/build.gradle | 3 + .../trustedintermediary/wrappers/DbDao.java | 13 ++ .../wrappers/SqlDriverManager.java | 11 ++ 23 files changed, 497 insertions(+), 8 deletions(-) create mode 100644 docker-compose.postgres.yml create mode 100644 etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/EtorSqlDriverManager.java create mode 100644 etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/PostgresDao.java create mode 100644 etor/src/test/groovy/gov/hhs/cdc/trustedintermediary/external/database/PostgresDaoTest.groovy create mode 100644 operations/template/db.tf create mode 100644 shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/DbDao.java create mode 100644 shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/SqlDriverManager.java diff --git a/.github/workflows/terraform-deploy_reusable.yml b/.github/workflows/terraform-deploy_reusable.yml index 91e179dc4..c0d2a7a4e 100644 --- a/.github/workflows/terraform-deploy_reusable.yml +++ b/.github/workflows/terraform-deploy_reusable.yml @@ -64,6 +64,18 @@ jobs: - name: Terraform Apply run: terraform apply -auto-approve -input=false ${{ inputs.TERRAFORM_APPLY_PARAMETERS }} + - name: Login via Azure CLI + uses: azure/login@v1 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + + - name: Run Db migration + run: | + export PGPASSWORD=$(az account get-access-token --resource-type oss-rdbms --query "[accessToken]" -o tsv) + psql "host=$(terraform output -raw database_hostname) port=5432 dbname=postgres user=cdcti-github sslmode=require" -c "CREATE TABLE IF NOT EXISTS metadata (message_id varchar(30), sender varchar(30), receiver varchar(30), hash_of_order varchar(1000), time_received timestamptz); GRANT ALL ON metadata TO azure_pg_admin; ALTER TABLE metadata OWNER TO azure_pg_admin;" + - id: export-terraform-output name: Export Terraform Output run: | diff --git a/build.gradle b/build.gradle index f273ca083..1e8452d31 100644 --- a/build.gradle +++ b/build.gradle @@ -95,6 +95,7 @@ ext.jacoco_excludes = [ '**/jjwt/JjwtEngine*', '**/apache/ApacheClient*', 
'**/azure/AzureSecrets*', + '**/database/EtorSqlDriverManager*', '**/azure/AzureClient*' ] diff --git a/docker-compose.postgres.yml b/docker-compose.postgres.yml new file mode 100644 index 000000000..fb706020d --- /dev/null +++ b/docker-compose.postgres.yml @@ -0,0 +1,17 @@ +version: "3.7" + +services: + postgresql: + image: postgres:16 + restart: unless-stopped + environment: + POSTGRES_DB: "intermediary" + POSTGRES_PASSWORD: "changeIT!" + POSTGRES_USER: "intermediary" + ports: + - 5433:5432 + volumes: + - ti_postgres_data:/var/lib/postgresql/data + +volumes: + ti_postgres_data: diff --git a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/etor/EtorDomainRegistration.java b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/etor/EtorDomainRegistration.java index 6842b292e..e890d7315 100644 --- a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/etor/EtorDomainRegistration.java +++ b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/etor/EtorDomainRegistration.java @@ -24,13 +24,18 @@ import gov.hhs.cdc.trustedintermediary.etor.orders.UnableToSendOrderException; import gov.hhs.cdc.trustedintermediary.external.azure.AzureClient; import gov.hhs.cdc.trustedintermediary.external.azure.AzureStorageAccountPartnerMetadataStorage; +import gov.hhs.cdc.trustedintermediary.external.database.DatabasePartnerMetadataStorage; +import gov.hhs.cdc.trustedintermediary.external.database.EtorSqlDriverManager; +import gov.hhs.cdc.trustedintermediary.external.database.PostgresDao; import gov.hhs.cdc.trustedintermediary.external.hapi.HapiOrderConverter; import gov.hhs.cdc.trustedintermediary.external.localfile.FilePartnerMetadataStorage; import gov.hhs.cdc.trustedintermediary.external.localfile.LocalFileOrderSender; import gov.hhs.cdc.trustedintermediary.external.reportstream.ReportStreamEndpointClient; import gov.hhs.cdc.trustedintermediary.external.reportstream.ReportStreamOrderSender; +import gov.hhs.cdc.trustedintermediary.wrappers.DbDao; import gov.hhs.cdc.trustedintermediary.wrappers.FhirParseException; import gov.hhs.cdc.trustedintermediary.wrappers.Logger; +import gov.hhs.cdc.trustedintermediary.wrappers.SqlDriverManager; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -79,15 +84,24 @@ public Map> domainRegistra ApplicationContext.register( ReportStreamEndpointClient.class, ReportStreamEndpointClient.getInstance()); - if (ApplicationContext.getEnvironment().equalsIgnoreCase("local")) { - ApplicationContext.register(OrderSender.class, LocalFileOrderSender.getInstance()); + if (ApplicationContext.getProperty("DB_URL") != null) { + ApplicationContext.register(SqlDriverManager.class, EtorSqlDriverManager.getInstance()); + ApplicationContext.register(DbDao.class, PostgresDao.getInstance()); + ApplicationContext.register( + PartnerMetadataStorage.class, DatabasePartnerMetadataStorage.getInstance()); + } else if (ApplicationContext.getEnvironment().equalsIgnoreCase("local")) { ApplicationContext.register( PartnerMetadataStorage.class, FilePartnerMetadataStorage.getInstance()); } else { - ApplicationContext.register(OrderSender.class, ReportStreamOrderSender.getInstance()); ApplicationContext.register( PartnerMetadataStorage.class, AzureStorageAccountPartnerMetadataStorage.getInstance()); + } + + if (ApplicationContext.getEnvironment().equalsIgnoreCase("local")) { + ApplicationContext.register(OrderSender.class, LocalFileOrderSender.getInstance()); + } else { + ApplicationContext.register(OrderSender.class, ReportStreamOrderSender.getInstance()); 
ApplicationContext.register(AzureClient.class, AzureClient.getInstance()); } diff --git a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/azure/AzureClient.java b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/azure/AzureClient.java index a6eba492d..fed814be2 100644 --- a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/azure/AzureClient.java +++ b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/azure/AzureClient.java @@ -1,5 +1,6 @@ package gov.hhs.cdc.trustedintermediary.external.azure; +import com.azure.core.credential.TokenRequestContext; import com.azure.identity.DefaultAzureCredentialBuilder; import com.azure.storage.blob.BlobClient; import com.azure.storage.blob.BlobContainerClient; @@ -40,4 +41,11 @@ public static AzureClient getInstance() { public BlobClient getBlobClient(String blobName) { return BLOB_CONTAINER_CLIENT.getBlobClient(blobName); } + + public String getScopedToken(String scope) { + return new DefaultAzureCredentialBuilder() + .build() + .getTokenSync(new TokenRequestContext().addScopes(scope)) + .getToken(); + } } diff --git a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/DatabasePartnerMetadataStorage.java b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/DatabasePartnerMetadataStorage.java index 3d14c8291..5e376e98e 100644 --- a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/DatabasePartnerMetadataStorage.java +++ b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/DatabasePartnerMetadataStorage.java @@ -1,12 +1,20 @@ package gov.hhs.cdc.trustedintermediary.external.database; import gov.hhs.cdc.trustedintermediary.etor.metadata.PartnerMetadata; +import gov.hhs.cdc.trustedintermediary.etor.metadata.PartnerMetadataException; import gov.hhs.cdc.trustedintermediary.etor.metadata.PartnerMetadataStorage; +import gov.hhs.cdc.trustedintermediary.wrappers.DbDao; +import gov.hhs.cdc.trustedintermediary.wrappers.Logger; +import java.sql.SQLException; import java.util.Optional; +import javax.inject.Inject; /** Implements the {@link PartnerMetadataStorage} using a database. 
*/ public class DatabasePartnerMetadataStorage implements PartnerMetadataStorage { + @Inject DbDao dao; + + @Inject Logger logger; private static final DatabasePartnerMetadataStorage INSTANCE = new DatabasePartnerMetadataStorage(); @@ -17,10 +25,28 @@ public static DatabasePartnerMetadataStorage getInstance() { } @Override - public Optional readMetadata(final String uniqueId) { - return Optional.empty(); + public Optional readMetadata(final String uniqueId) + throws PartnerMetadataException { + try { + PartnerMetadata data = (PartnerMetadata) dao.fetchMetadata(uniqueId); + return Optional.ofNullable(data); + } catch (SQLException e) { + throw new PartnerMetadataException("Error retrieving metadata", e); + } } @Override - public void saveMetadata(final PartnerMetadata metadata) {} + public void saveMetadata(final PartnerMetadata metadata) throws PartnerMetadataException { + logger.logInfo("saving the metadata"); + try { + dao.upsertMetadata( + metadata.receivedSubmissionId(), + metadata.sender(), + metadata.receiver(), + metadata.hash(), + metadata.timeReceived()); + } catch (SQLException e) { + throw new PartnerMetadataException("Error saving metadata", e); + } + } } diff --git a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/EtorSqlDriverManager.java b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/EtorSqlDriverManager.java new file mode 100644 index 000000000..d881d4f27 --- /dev/null +++ b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/EtorSqlDriverManager.java @@ -0,0 +1,24 @@ +package gov.hhs.cdc.trustedintermediary.external.database; + +import gov.hhs.cdc.trustedintermediary.wrappers.SqlDriverManager; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Properties; + +/** Wrapper class for SqlDriverManager */ +public class EtorSqlDriverManager implements SqlDriverManager { + + private static final EtorSqlDriverManager INSTANCE = new EtorSqlDriverManager(); + + private EtorSqlDriverManager() {} + + @Override + public Connection getConnection(String url, Properties props) throws SQLException { + return DriverManager.getConnection(url, props); + } + + public static EtorSqlDriverManager getInstance() { + return INSTANCE; + } +} diff --git a/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/PostgresDao.java b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/PostgresDao.java new file mode 100644 index 000000000..2cb147584 --- /dev/null +++ b/etor/src/main/java/gov/hhs/cdc/trustedintermediary/external/database/PostgresDao.java @@ -0,0 +1,129 @@ +package gov.hhs.cdc.trustedintermediary.external.database; + +import gov.hhs.cdc.trustedintermediary.context.ApplicationContext; +import gov.hhs.cdc.trustedintermediary.etor.metadata.PartnerMetadata; +import gov.hhs.cdc.trustedintermediary.external.azure.AzureClient; +import gov.hhs.cdc.trustedintermediary.wrappers.DbDao; +import gov.hhs.cdc.trustedintermediary.wrappers.Logger; +import gov.hhs.cdc.trustedintermediary.wrappers.SqlDriverManager; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.Instant; +import java.util.Properties; +import javax.inject.Inject; + +/** Class for accessing and managing data for the postgres Database */ +public class PostgresDao implements DbDao { + + private static final PostgresDao INSTANCE = new PostgresDao(); + + @Inject Logger logger; + 
@Inject SqlDriverManager driverManager; + @Inject AzureClient azureClient; + + private PostgresDao() {} + + protected Connection connect() throws SQLException { + Connection conn; + String url = + "jdbc:postgresql://" + + ApplicationContext.getProperty("DB_URL") + + ":" + + ApplicationContext.getProperty("DB_PORT") + + "/" + + ApplicationContext.getProperty("DB_NAME"); + + logger.logInfo("going to connect to db url {}", url); + + // Ternaries prevent NullPointerException during testing since we decided not to mock env + // vars. + String user = + ApplicationContext.getProperty("DB_USER") == null + ? "" + : ApplicationContext.getProperty("DB_USER"); + String pass = + ApplicationContext.getProperty("DB_PASS") == null + ? "" + : ApplicationContext.getProperty("DB_PASS"); + String ssl = + ApplicationContext.getProperty("DB_SSL") == null + ? "" + : ApplicationContext.getProperty("DB_SSL"); + + Properties props = new Properties(); + props.setProperty("user", user); + logger.logInfo("About to get the db password"); + + String token = + pass.isBlank() + ? azureClient.getScopedToken( + "https://ossrdbms-aad.database.windows.net/.default") + : pass; + + logger.logInfo("got the db password"); + + props.setProperty("password", token); + + // If the below prop isn't set to require and we just set ssl=true it will expect a CA cert + // in azure which breaks it + props.setProperty("sslmode", ssl); + conn = driverManager.getConnection(url, props); + logger.logInfo("DB Connected Successfully"); + return conn; + } + + public static PostgresDao getInstance() { + return INSTANCE; + } + + @Override + public synchronized void upsertMetadata( + String receivedSubmissionId, + String sender, + String receiver, + String hash, + Instant timeReceived) + throws SQLException { + + try (Connection conn = connect(); + PreparedStatement statement = + conn.prepareStatement("INSERT INTO metadata VALUES (?, ?, ?, ?, ?)")) { + // TODO: Update the below statement to handle on conflict, after we figure out what that + // behavior should be + statement.setString(1, receivedSubmissionId); + statement.setString(2, sender); + statement.setString(3, receiver); + statement.setString(4, hash); + statement.setTimestamp(5, Timestamp.from(timeReceived)); + + statement.executeUpdate(); + } + } + + @Override + public synchronized PartnerMetadata fetchMetadata(String receivedSubmissionId) + throws SQLException { + try (Connection conn = connect(); + PreparedStatement statement = + conn.prepareStatement("SELECT * FROM metadata where message_id = ?")) { + + statement.setString(1, receivedSubmissionId); + + ResultSet result = statement.executeQuery(); + + var hasValidData = result.next(); + if (!hasValidData) { + return null; + } + + return new PartnerMetadata( + result.getString("message_id"), + result.getString("receiver"), + result.getTimestamp("time_received").toInstant(), + result.getString("hash_of_order")); + } + } +} diff --git a/etor/src/test/groovy/gov/hhs/cdc/trustedintermediary/external/database/PostgresDaoTest.groovy b/etor/src/test/groovy/gov/hhs/cdc/trustedintermediary/external/database/PostgresDaoTest.groovy new file mode 100644 index 000000000..a1516514d --- /dev/null +++ b/etor/src/test/groovy/gov/hhs/cdc/trustedintermediary/external/database/PostgresDaoTest.groovy @@ -0,0 +1,140 @@ +package gov.hhs.cdc.trustedintermediary.external.database + +import gov.hhs.cdc.trustedintermediary.context.TestApplicationContext +import gov.hhs.cdc.trustedintermediary.etor.metadata.PartnerMetadata +import 
gov.hhs.cdc.trustedintermediary.external.azure.AzureClient +import gov.hhs.cdc.trustedintermediary.wrappers.SqlDriverManager +import spock.lang.Specification + +import java.sql.Timestamp +import java.time.Instant + +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.sql.SQLException + +class PostgresDaoTest extends Specification { + + def setup() { + TestApplicationContext.reset() + TestApplicationContext.init() + + def mockAzureClient = Mock(AzureClient) + mockAzureClient.getScopedToken(_ as String) >> "DogCow password" + TestApplicationContext.register(AzureClient, mockAzureClient) + + TestApplicationContext.register(PostgresDao, PostgresDao.getInstance()) + } + + def "connect happy path works"(){ + given: + def mockDriver = Mock(SqlDriverManager) + Connection mockConn + mockDriver.getConnection(_ as String, _ as Properties) >> {mockConn} + + TestApplicationContext.register(SqlDriverManager, mockDriver) + TestApplicationContext.injectRegisteredImplementations() + + when: + def conn = PostgresDao.getInstance().connect() + + then: + conn == mockConn + } + + def "connect unhappy path throws exception"() { + given: + def mockDriver = Mock(SqlDriverManager) + mockDriver.getConnection(_ as String, _ as Properties) >> {throw new SQLException()} + TestApplicationContext.register(SqlDriverManager, mockDriver) + TestApplicationContext.injectRegisteredImplementations() + + when: + PostgresDao.getInstance().connect() + + then: + thrown(SQLException) + } + + def "upsertMetadata works"() { + given: + def upsertMockDriver = Mock(SqlDriverManager) + Connection upsertMockConn = Mock(Connection) + PreparedStatement upsertMockStatement = Mock(PreparedStatement) + + upsertMockDriver.getConnection(_ as String, _ as Properties) >> upsertMockConn + upsertMockConn.prepareStatement(_ as String) >> upsertMockStatement + + TestApplicationContext.register(SqlDriverManager, upsertMockDriver) + TestApplicationContext.injectRegisteredImplementations() + + when: + PostgresDao.getInstance().upsertMetadata("mock_id", "mock_sender", "mock_receiver", "mock_hash", Instant.now()) + + then: + 1 * upsertMockStatement.executeUpdate() + } + + + def "upsertMetadata unhappy path throws exception"() { + given: + def upsertMockDriver = Mock(SqlDriverManager) + Connection upsertMockConn = Mock(Connection) + + upsertMockDriver.getConnection(_ as String, _ as Properties) >> upsertMockConn + upsertMockConn.prepareStatement(_ as String) >> { throw new SQLException() } + + + TestApplicationContext.register(SqlDriverManager, upsertMockDriver) + TestApplicationContext.injectRegisteredImplementations() + + when: + PostgresDao.getInstance().upsertMetadata("mock_id", "mock_sender", "mock_receiver", "mock_hash", Instant.now()) + + then: + thrown(SQLException) + } + + def "select metadata retrieves data"(){ + given: + def selectMockDriver = Mock(SqlDriverManager) + Connection selectMockConn = Mock(Connection) + PreparedStatement selectPreparedStatement = Mock(PreparedStatement) + ResultSet selectResultSet = Mock(ResultSet) + + selectMockDriver.getConnection(_ as String, _ as Properties) >> selectMockConn + selectMockConn.prepareStatement(_ as String) >> selectPreparedStatement + selectPreparedStatement.executeQuery() >> selectResultSet + selectResultSet.next() >> true + selectResultSet.getTimestamp(_ as String) >> Timestamp.from(Instant.now()) + + TestApplicationContext.register(SqlDriverManager, selectMockDriver) + TestApplicationContext.injectRegisteredImplementations() + + when: + 
PartnerMetadata result = (PartnerMetadata) PostgresDao.getInstance().fetchMetadata("mock_sender") + + then: + result != null + } + + def "fetchMetadata unhappy path throws exception"() { + given: + def selectMockDriver = Mock(SqlDriverManager) + Connection selectMockConn = Mock(Connection) + + selectMockDriver.getConnection(_ as String, _ as Properties) >> selectMockConn + selectMockConn.prepareStatement(_ as String) >> { throw new SQLException() } + + + TestApplicationContext.register(SqlDriverManager, selectMockDriver) + TestApplicationContext.injectRegisteredImplementations() + + when: + PostgresDao.getInstance().fetchMetadata("mock_lookup") + + then: + thrown(SQLException) + } +} diff --git a/generate_env.sh b/generate_env.sh index cb7d3809a..784e8efc2 100755 --- a/generate_env.sh +++ b/generate_env.sh @@ -1,11 +1,19 @@ #!/bin/bash # Define the content of the .env file + + env_content="ENV=local KEY_VAULT-NAME=ti-key-vault-name REPORT_STREAM_URL_PREFIX=http://localhost:7071 STORAGE_ACCOUNT_BLOB_ENDPOINT=https://cdctiinternal.blob.core.windows.net -METADATA_CONTAINER_NAME=metadata" +METADATA_CONTAINER_NAME=metadata +DB_URL=localhost +DB_PORT=5433 +DB_NAME=intermediary +DB_USER=intermediary +DB_PASS=changeIT! +DB_SSL=require" # Get directory of script file script_dir="$(dirname "$0")" diff --git a/operations/environments/dev/outputs.tf b/operations/environments/dev/outputs.tf index ce3382511..7535c5617 100644 --- a/operations/environments/dev/outputs.tf +++ b/operations/environments/dev/outputs.tf @@ -1,6 +1,11 @@ output "registry" { value = module.template.registry } + output "publish_app" { value = module.template.publish_app } + +output "database_hostname" { + value = module.template.database_hostname +} \ No newline at end of file diff --git a/operations/environments/internal/outputs.tf b/operations/environments/internal/outputs.tf index e24968bcd..7535c5617 100644 --- a/operations/environments/internal/outputs.tf +++ b/operations/environments/internal/outputs.tf @@ -5,3 +5,7 @@ output "registry" { output "publish_app" { value = module.template.publish_app } + +output "database_hostname" { + value = module.template.database_hostname +} \ No newline at end of file diff --git a/operations/environments/pr/outputs.tf b/operations/environments/pr/outputs.tf index e24968bcd..7535c5617 100644 --- a/operations/environments/pr/outputs.tf +++ b/operations/environments/pr/outputs.tf @@ -5,3 +5,7 @@ output "registry" { output "publish_app" { value = module.template.publish_app } + +output "database_hostname" { + value = module.template.database_hostname +} \ No newline at end of file diff --git a/operations/environments/prod/outputs.tf b/operations/environments/prod/outputs.tf index e24968bcd..7535c5617 100644 --- a/operations/environments/prod/outputs.tf +++ b/operations/environments/prod/outputs.tf @@ -5,3 +5,7 @@ output "registry" { output "publish_app" { value = module.template.publish_app } + +output "database_hostname" { + value = module.template.database_hostname +} \ No newline at end of file diff --git a/operations/environments/staging/outputs.tf b/operations/environments/staging/outputs.tf index e24968bcd..7535c5617 100644 --- a/operations/environments/staging/outputs.tf +++ b/operations/environments/staging/outputs.tf @@ -5,3 +5,7 @@ output "registry" { output "publish_app" { value = module.template.publish_app } + +output "database_hostname" { + value = module.template.database_hostname +} \ No newline at end of file diff --git a/operations/template/app.tf 
b/operations/template/app.tf index 16c95f5f7..e7aaaf377 100644 --- a/operations/template/app.tf +++ b/operations/template/app.tf @@ -36,6 +36,11 @@ resource "azurerm_linux_web_app" "api" { KEY_VAULT_NAME = azurerm_key_vault.key_storage.name STORAGE_ACCOUNT_BLOB_ENDPOINT = azurerm_storage_account.storage.primary_blob_endpoint METADATA_CONTAINER_NAME = azurerm_storage_container.metadata.name + DB_URL = azurerm_postgresql_flexible_server.database.fqdn + DB_PORT = "5432" + DB_NAME = "postgres" + DB_USER = "cdcti-internal-api" + DB_SSL = "require" } identity { diff --git a/operations/template/db.tf b/operations/template/db.tf new file mode 100644 index 000000000..06333f379 --- /dev/null +++ b/operations/template/db.tf @@ -0,0 +1,51 @@ +data "azuread_service_principal" "principal" { + object_id = data.azurerm_client_config.current.object_id +} + +resource "azurerm_postgresql_flexible_server" "database" { + name = "cdcti-${var.environment}-database" + resource_group_name = data.azurerm_resource_group.group.name + location = data.azurerm_resource_group.group.location + sku_name = "B_Standard_B1ms" + version = "16" + storage_mb = "32768" + backup_retention_days = "14" + + authentication { + password_auth_enabled = "false" + active_directory_auth_enabled = "true" + tenant_id = data.azurerm_client_config.current.tenant_id + } + + lifecycle { + ignore_changes = [ + zone, + high_availability.0.standby_availability_zone + ] + } +} + +resource "azurerm_postgresql_flexible_server_active_directory_administrator" "admin_for_deployer" { + server_name = azurerm_postgresql_flexible_server.database.name + resource_group_name = data.azurerm_resource_group.group.name + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = var.deployer_id + principal_name = "cdcti-github" + principal_type = "ServicePrincipal" +} + +resource "azurerm_postgresql_flexible_server_active_directory_administrator" "admin_for_app" { + server_name = azurerm_postgresql_flexible_server.database.name + resource_group_name = data.azurerm_resource_group.group.name + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = azurerm_linux_web_app.api.identity.0.principal_id + principal_name = azurerm_linux_web_app.api.name + principal_type = "ServicePrincipal" +} + +resource "azurerm_postgresql_flexible_server_firewall_rule" "db_firewall_5" { + name = "AllowAzure" + server_id = azurerm_postgresql_flexible_server.database.id + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} \ No newline at end of file diff --git a/operations/template/key.tf b/operations/template/key.tf index dcb1e1ec0..fe1dcdca3 100644 --- a/operations/template/key.tf +++ b/operations/template/key.tf @@ -82,4 +82,4 @@ resource "azurerm_key_vault_secret" "trusted_intermediary_private_key" { depends_on = [azurerm_key_vault_access_policy.allow_github_deployer] //wait for the permission that allows our deployer to write the secret } -data "azurerm_client_config" "current" {} + diff --git a/operations/template/main.tf b/operations/template/main.tf index 37923f8ee..6dee665d5 100644 --- a/operations/template/main.tf +++ b/operations/template/main.tf @@ -11,3 +11,5 @@ locals { data "azurerm_resource_group" "group" { name = "csels-rsti-${var.environment}-moderate-rg" } + +data "azurerm_client_config" "current" {} \ No newline at end of file diff --git a/operations/template/outputs.tf b/operations/template/outputs.tf index 0990abab3..68b20b901 100644 --- a/operations/template/outputs.tf +++ b/operations/template/outputs.tf @@ -5,3 +5,7 @@ output 
"registry" { output "publish_app" { value = azurerm_linux_web_app.api.name } + +output "database_hostname" { + value = azurerm_postgresql_flexible_server.database.fqdn +} \ No newline at end of file diff --git a/shared/build.gradle b/shared/build.gradle index 9809cd4b3..52fa69601 100644 --- a/shared/build.gradle +++ b/shared/build.gradle @@ -55,6 +55,9 @@ dependencies { // dotenv-java implementation 'io.github.cdimascio:dotenv-java:3.0.0' + + // postgres + implementation 'org.postgresql:postgresql:42.7.0' } jacocoTestCoverageVerification { diff --git a/shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/DbDao.java b/shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/DbDao.java new file mode 100644 index 000000000..47943fa22 --- /dev/null +++ b/shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/DbDao.java @@ -0,0 +1,13 @@ +package gov.hhs.cdc.trustedintermediary.wrappers; + +import java.sql.SQLException; +import java.time.Instant; + +/** Interface for accessing the database for metadata */ +public interface DbDao { + void upsertMetadata( + String id, String sender, String receiver, String hash, Instant timeReceived) + throws SQLException; + + Object fetchMetadata(String uniqueId) throws SQLException; +} diff --git a/shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/SqlDriverManager.java b/shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/SqlDriverManager.java new file mode 100644 index 000000000..1989a163b --- /dev/null +++ b/shared/src/main/java/gov/hhs/cdc/trustedintermediary/wrappers/SqlDriverManager.java @@ -0,0 +1,11 @@ +package gov.hhs.cdc.trustedintermediary.wrappers; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; + +/** Interface for SqlDriverManager, this allows for easier testing */ +public interface SqlDriverManager { + + Connection getConnection(String url, Properties props) throws SQLException; +}