This repository has been archived by the owner on Feb 7, 2025. It is now read-only.

Merge branch 'main' into devex/1247-add_impact_sections_to_adrs_012-016
tjohnson7021 authored Dec 4, 2024 (2 parents eeceec2 + 96fc5c3, commit 8c6aa19)
Showing 23 changed files with 107 additions and 98 deletions.
7 changes: 6 additions & 1 deletion README.md
@@ -56,7 +56,7 @@ creates a `.env` file in the resource folder with the required configuration

### Using a local database

Use [docker-compose.postgres.yml](docker-compose.postgres.yml) to run your local DB. In IntelliJ, you can click the play arrow to start it
Use [docker-compose.yml](docker-compose.yml) to run your local DB. In IntelliJ, you can click the play arrow to start it

![docker-postgres.png](images/docker-postgres.png)
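
For anyone not using IntelliJ, the same database can be started from a terminal. This is a minimal sketch, assuming the Docker Compose v2 CLI and the `postgresql` service name defined in docker-compose.yml:

```shell
# Start only the local Postgres container defined in docker-compose.yml
docker compose up -d postgresql

# Confirm the container is running and host port 5433 maps to 5432
docker compose ps postgresql
```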

@@ -466,3 +466,8 @@ Please refer to [CDC's Template Repository](https://github.com/CDCgov/template)
for more information about [contributing to this repository](https://github.com/CDCgov/template/blob/master/CONTRIBUTING.md),
[public domain notices and disclaimers](https://github.com/CDCgov/template/blob/master/DISCLAIMER.md),
and [code of conduct](https://github.com/CDCgov/template/blob/master/code-of-conduct.md).

### Troubleshooting

Error: copier: stat: "/app/build/libs/app-all.jar": no such file or directory
Solution: Run ./gradlew shadowjar first
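
A hedged sketch of the fix this troubleshooting entry describes, assuming the Shadow plugin's `shadowJar` task produces app/build/libs/app-all.jar and that the router image is built from local sources:

```shell
# Build the fat jar that the Docker build copies from app/build/libs/app-all.jar
./gradlew shadowJar

# Rebuild and restart the router container so it picks up the fresh jar
docker compose up --build -d router
```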
2 changes: 1 addition & 1 deletion app/build.gradle
@@ -28,7 +28,7 @@ dependencies {
testImplementation 'org.apache.groovy:groovy:4.0.24'
testImplementation 'org.spockframework:spock-core:2.3-groovy-4.0'
testImplementation 'com.openpojo:openpojo:0.9.1'
testImplementation 'nl.jqno.equalsverifier:equalsverifier:3.17.3'
testImplementation 'nl.jqno.equalsverifier:equalsverifier:3.17.4'
}

jacocoTestCoverageVerification {
23 changes: 0 additions & 23 deletions docker-compose.postgres.yml

This file was deleted.

22 changes: 22 additions & 0 deletions docker-compose.yml
@@ -1,3 +1,10 @@
# Run the following to migrate...
# liquibase update --changelog-file ./etor/databaseMigrations/root.yml --url jdbc:postgresql://localhost:5433/intermediary --username intermediary --password 'changeIT!' --label-filter '!azure'

# Run the following to rollback...
# liquibase rollback-count --changelog-file ./etor/databaseMigrations/root.yml --url jdbc:postgresql://localhost:5433/intermediary --username intermediary --password 'changeIT!' --count 2


version: "3.7"
services:
router:
@@ -7,3 +14,18 @@ services:
ports:
- "8080:8080" # default api endpoint port
- "6006:6006" # Java debug port

postgresql:
image: postgres:16
restart: unless-stopped
environment:
POSTGRES_DB: "intermediary"
POSTGRES_PASSWORD: "changeIT!" # pragma: allowlist secret
POSTGRES_USER: "intermediary"
ports:
- 5433:5432
volumes:
- ti_postgres_data:/var/lib/postgresql/data

volumes:
ti_postgres_data:
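
Once the new `postgresql` service is up, connectivity can be checked against the remapped host port. A sketch assuming a local `psql` client; it will prompt for the password set in the compose file:

```shell
# Connect to the local intermediary database exposed on host port 5433
psql -h localhost -p 5433 -U intermediary -d intermediary -c 'SELECT version();'
```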
4 changes: 2 additions & 2 deletions e2e/build.gradle
@@ -15,8 +15,8 @@ dependencies {
implementation 'org.apache.httpcomponents.client5:httpclient5-fluent:5.4.1'

//jackson
implementation 'com.fasterxml.jackson.core:jackson-core:2.18.1'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.18.1'
implementation 'com.fasterxml.jackson.core:jackson-core:2.18.2'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.18.2'

//fhir
implementation 'ca.uhn.hapi.fhir:hapi-fhir-base:7.6.0'
2 changes: 1 addition & 1 deletion etor/build.gradle
@@ -19,7 +19,7 @@ dependencies {
testImplementation 'org.apache.groovy:groovy:4.0.24'
testImplementation 'org.spockframework:spock-core:2.3-groovy-4.0'
testImplementation 'com.openpojo:openpojo:0.9.1'
testImplementation 'nl.jqno.equalsverifier:equalsverifier:3.17.3'
testImplementation 'nl.jqno.equalsverifier:equalsverifier:3.17.4'
}

jacocoTestCoverageVerification {
2 changes: 1 addition & 1 deletion operations/environments/dev/main.tf
@@ -2,7 +2,7 @@ terraform {
required_providers {
azurerm = {
source = "hashicorp/azurerm"
version = "4.11.0"
version = "4.12.0"
}
}

2 changes: 1 addition & 1 deletion operations/environments/internal/main.tf
@@ -2,7 +2,7 @@ terraform {
required_providers {
azurerm = {
source = "hashicorp/azurerm"
version = "4.11.0"
version = "4.12.0"
}
}

2 changes: 1 addition & 1 deletion operations/environments/pr/main.tf
@@ -2,7 +2,7 @@ terraform {
required_providers {
azurerm = {
source = "hashicorp/azurerm"
version = "4.11.0"
version = "4.12.0"
}
}

2 changes: 1 addition & 1 deletion operations/environments/prd/main.tf
@@ -2,7 +2,7 @@ terraform {
required_providers {
azurerm = {
source = "hashicorp/azurerm"
version = "4.11.0"
version = "4.12.0"
}
}

2 changes: 1 addition & 1 deletion operations/environments/stg/main.tf
@@ -2,7 +2,7 @@ terraform {
required_providers {
azurerm = {
source = "hashicorp/azurerm"
version = "4.11.0"
version = "4.12.0"
}
}

4 changes: 2 additions & 2 deletions rs-e2e/build.gradle
@@ -14,8 +14,8 @@ dependencies {
testImplementation testFixtures(project(':shared'))

//jackson
implementation 'com.fasterxml.jackson.core:jackson-core:2.18.1'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.18.1'
implementation 'com.fasterxml.jackson.core:jackson-core:2.18.2'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.18.2'

// azure
implementation 'com.azure:azure-storage-blob:12.29.0'
@@ -4,7 +4,6 @@
import ca.uhn.hl7v2.HL7Exception;
import ca.uhn.hl7v2.HapiContext;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.model.v251.segment.MSH;
import ca.uhn.hl7v2.parser.Parser;
import com.google.common.collect.Sets;
import gov.hhs.cdc.trustedintermediary.rse2e.HL7FileStream;
@@ -32,13 +31,13 @@ public static HapiHL7FileMatcher getInstance() {
return INSTANCE;
}

public Map<Message, Message> matchFiles(
public Map<HapiHL7Message, HapiHL7Message> matchFiles(
List<HL7FileStream> outputFiles, List<HL7FileStream> inputFiles)
throws HapiHL7FileMatcherException {
// We pair up output and input files based on the control ID, which is in MSH-10
// Any files (either input or output) that don't have a match are logged
Map<String, Message> inputMap = mapMessageByControlId(inputFiles);
Map<String, Message> outputMap = mapMessageByControlId(outputFiles);
Map<String, HapiHL7Message> inputMap = parseAndMapMessageByControlId(inputFiles);
Map<String, HapiHL7Message> outputMap = parseAndMapMessageByControlId(outputFiles);

Set<String> inputKeys = inputMap.keySet();
Set<String> outputKeys = outputMap.keySet();
@@ -55,10 +54,10 @@ public Map<Message, Message> matchFiles(
return inputKeys.stream().collect(Collectors.toMap(inputMap::get, outputMap::get));
}

public Map<String, Message> mapMessageByControlId(List<HL7FileStream> files)
public Map<String, HapiHL7Message> parseAndMapMessageByControlId(List<HL7FileStream> files)
throws HapiHL7FileMatcherException {

Map<String, Message> messageMap = new HashMap<>();
Map<String, HapiHL7Message> messageMap = new HashMap<>();

try (HapiContext context = new DefaultHapiContext()) {
Parser parser = context.getPipeParser();
@@ -68,13 +67,13 @@ public Map<String, Message> mapMessageByControlId(List<HL7FileStream> files)
try (InputStream inputStream = hl7FileStream.inputStream()) {
String content = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
Message message = parser.parse(content);
MSH mshSegment = (MSH) message.get("MSH");
String msh10 = mshSegment.getMessageControlID().getValue();
HapiHL7Message hapiHL7Message = new HapiHL7Message(message);
String msh10 = hapiHL7Message.getIdentifier();
if (msh10 == null || msh10.isEmpty()) {
throw new HapiHL7FileMatcherException(
String.format("MSH-10 is empty for file: %s", fileName));
}
messageMap.put(msh10, message);
messageMap.put(msh10, hapiHL7Message);
} catch (HL7Exception e) {
throw new HapiHL7FileMatcherException(
String.format("Failed to parse HL7 message from file: %s", fileName),
@@ -1,6 +1,8 @@
package gov.hhs.cdc.trustedintermediary.rse2e.external.hapi;

import ca.uhn.hl7v2.HL7Exception;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.model.v251.segment.MSH;
import gov.hhs.cdc.trustedintermediary.wrappers.HealthData;

/**
@@ -21,7 +23,12 @@ public Message getUnderlyingData() {
}

@Override
public String getName() {
return underlyingData.getName();
public String getIdentifier() {
try {
MSH mshSegment = (MSH) underlyingData.get("MSH");
return mshSegment.getMessageControlID().getValue();
} catch (HL7Exception e) {
return null;
}
}
}
@@ -46,7 +46,7 @@ public final void runRule(HealthData<?>... data) {
+ "': "
+ assertion
+ " ("
+ outputData.getName()
+ outputData.getIdentifier()
+ ")");
}
} catch (Exception e) {
@@ -1,9 +1,8 @@
package gov.hhs.cdc.trustedintermediary.rse2e.ruleengine;

import ca.uhn.hl7v2.model.Message;
import gov.hhs.cdc.trustedintermediary.rse2e.external.hapi.HapiHL7Message;
import gov.hhs.cdc.trustedintermediary.ruleengine.RuleLoader;
import gov.hhs.cdc.trustedintermediary.ruleengine.RuleLoaderException;
import gov.hhs.cdc.trustedintermediary.wrappers.HealthData;
import gov.hhs.cdc.trustedintermediary.wrappers.Logger;
import gov.hhs.cdc.trustedintermediary.wrappers.formatter.TypeReference;
import java.io.IOException;
@@ -64,21 +63,18 @@ public List<AssertionRule> getRules() {
return assertionRules;
}

public Set<AssertionRule> runRules(Message outputMessage, Message inputMessage) {
public Set<AssertionRule> runRules(HealthData<?> outputMessage, HealthData<?> inputMessage) {
try {
ensureRulesLoaded();
} catch (RuleLoaderException e) {
logger.logError("Failed to load rules definitions", e);
return Set.of();
}

HapiHL7Message outputHapiMessage = new HapiHL7Message(outputMessage);
HapiHL7Message inputHapiMessage = new HapiHL7Message(inputMessage);

Set<AssertionRule> runRules = new HashSet<>();
for (AssertionRule rule : assertionRules) {
if (rule.shouldRun(outputHapiMessage)) {
rule.runRule(outputHapiMessage, inputHapiMessage);
if (rule.shouldRun(outputMessage)) {
rule.runRule(outputMessage, inputMessage);
runRules.add(rule);
}
}
@@ -1,6 +1,5 @@
package gov.hhs.cdc.trustedintermediary.rse2e

import ca.uhn.hl7v2.model.Message
import gov.hhs.cdc.trustedintermediary.context.TestApplicationContext
import gov.hhs.cdc.trustedintermediary.rse2e.external.hapi.HapiHL7FileMatcher
import gov.hhs.cdc.trustedintermediary.rse2e.external.hapi.HapiHL7ExpressionEvaluator
@@ -65,8 +64,8 @@ class AutomatedTest extends Specification {

when:
for (messagePair in matchedFiles) {
Message inputMessage = messagePair.getKey() as Message
Message outputMessage = messagePair.getValue() as Message
def inputMessage = messagePair.getKey()
def outputMessage = messagePair.getValue()
def evaluatedRules = engine.runRules(outputMessage, inputMessage)
rulesToEvaluate.removeAll(evaluatedRules)
}
@@ -1,6 +1,5 @@
package gov.hhs.cdc.trustedintermediary.rse2e.external.hapi

import ca.uhn.hl7v2.model.Message
import gov.hhs.cdc.trustedintermediary.context.TestApplicationContext
import gov.hhs.cdc.trustedintermediary.rse2e.HL7FileStream
import gov.hhs.cdc.trustedintermediary.wrappers.Logger
@@ -31,15 +30,15 @@ class HapiHL7FileMatcherTest extends Specification {
new HL7FileStream("outputFileStream1", Mock(InputStream)),
new HL7FileStream("outputFileStream2", Mock(InputStream))
]
def mockInputMessage1 = Mock(Message)
def mockInputMessage2 = Mock(Message)
def mockOutputMessage1 = Mock(Message)
def mockOutputMessage2 = Mock(Message)
spyFileMatcher.mapMessageByControlId(mockInputFiles) >> [ "001": mockInputMessage1, "002": mockInputMessage2 ]
spyFileMatcher.mapMessageByControlId(mockOutputFiles) >> [ "001": mockOutputMessage1, "002": mockOutputMessage2 ]
def mockInputMessage1 = Mock(HapiHL7Message)
def mockInputMessage2 = Mock(HapiHL7Message)
def mockOutputMessage1 = Mock(HapiHL7Message)
def mockOutputMessage2 = Mock(HapiHL7Message)
spyFileMatcher.parseAndMapMessageByControlId(mockInputFiles) >> ["001": mockInputMessage1, "002": mockInputMessage2 ]
spyFileMatcher.parseAndMapMessageByControlId(mockOutputFiles) >> ["001": mockOutputMessage1, "002": mockOutputMessage2 ]

when:
def result =spyFileMatcher.matchFiles(mockOutputFiles, mockInputFiles)
def result = spyFileMatcher.matchFiles(mockOutputFiles, mockInputFiles)

then:
result.size() == 2
@@ -61,8 +60,8 @@
mockOutputFiles = [
new HL7FileStream("matchingOutputFileStream", Mock(InputStream))
]
spyFileMatcher.mapMessageByControlId(mockInputFiles) >> [ "001": Mock(Message), "002": Mock(Message) ]
spyFileMatcher.mapMessageByControlId(mockOutputFiles) >> [ "001": Mock(Message) ]
spyFileMatcher.parseAndMapMessageByControlId(mockInputFiles) >> ["001": Mock(HapiHL7Message), "002": Mock(HapiHL7Message) ]
spyFileMatcher.parseAndMapMessageByControlId(mockOutputFiles) >> ["001": Mock(HapiHL7Message) ]
spyFileMatcher.matchFiles(mockOutputFiles, mockInputFiles)

then:
@@ -80,8 +79,8 @@
new HL7FileStream("matchingOutputFileStream", Mock(InputStream)),
new HL7FileStream("nonMatchingOutputFileStream", Mock(InputStream))
]
spyFileMatcher.mapMessageByControlId(mockInputFiles) >> [ "001": Mock(Message) ]
spyFileMatcher.mapMessageByControlId(mockOutputFiles) >> [ "001": Mock(Message), "003": Mock(Message) ]
spyFileMatcher.parseAndMapMessageByControlId(mockInputFiles) >> ["001": Mock(HapiHL7Message) ]
spyFileMatcher.parseAndMapMessageByControlId(mockOutputFiles) >> ["001": Mock(HapiHL7Message), "003": Mock(HapiHL7Message) ]
spyFileMatcher.matchFiles(mockOutputFiles, mockInputFiles)

then:
@@ -110,14 +109,16 @@ class HapiHL7FileMatcherTest extends Specification {
]

when:
def result = fileMatcher.mapMessageByControlId(mockFiles)
def result = fileMatcher.parseAndMapMessageByControlId(mockFiles)

then:
result.size() == 2
result[file1Msh10] != null
file1MshSegment == result[file1Msh10].encode().trim()
result[file2Msh10] != null
file2MshSegment == result[file2Msh10].encode().trim()
def message1 = result[file1Msh10]
def message2 = result[file2Msh10]
message1 != null
message2 != null
file1MshSegment == message1.getUnderlyingData().encode().trim()
file2MshSegment == message2.getUnderlyingData().encode().trim()
}

def "should throw HapiHL7FileMatcherException when MSH-10 is empty"() {
Expand All @@ -130,7 +131,7 @@ class HapiHL7FileMatcherTest extends Specification {
def hl7FileStream = new HL7FileStream("file1", inputStream)

when:
fileMatcher.mapMessageByControlId([hl7FileStream])
fileMatcher.parseAndMapMessageByControlId([hl7FileStream])

then:
thrown(HapiHL7FileMatcherException)
@@ -142,7 +143,7 @@
def hl7FileStream = new HL7FileStream("badFile", inputStream)

when:
fileMatcher.mapMessageByControlId([hl7FileStream])
fileMatcher.parseAndMapMessageByControlId([hl7FileStream])

then:
thrown(HapiHL7FileMatcherException)