Skip to content

Commit

Permalink
support for access heme
Browse files Browse the repository at this point in the history
- CVR fetcher: new endpoint
- automation_env: new study directory parallel to impact, archer, access, etc.
- fetch_dmp_data: calls for fetching cvr, generating supp data, merging into solid_heme
- dmp-import-vars: redcap support
  • Loading branch information
Avery Wang authored and callachennault committed Feb 20, 2024
1 parent c61c14e commit 1415361
Show file tree
Hide file tree
Showing 8 changed files with 159 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ private static Options getOptions(String[] args) {
.addOption("j", "json", false, "To read or not to read. This can be used alone or in combination with --gml")
.addOption("g", "gml", false, "Run germline job")
.addOption("s", "skipSeg", false, "Flag to skip fetching seg data")
.addOption("i", "study_id", true, "Study identifier (i.e., mskimpact, mskraindance, mskarcher, mskimpact_heme)")
.addOption("i", "study_id", true, "Study identifier (i.e., mskimpact, mskraindance, mskarcher, mskimpact_heme, mskaccess, mskaccess_heme)")
.addOption("t", "test", false, "Flag for running pipeline in testing mode so that samples are not requeued or consumed")
.addOption("c", "consume_samples", true, "Path to CVR json filename")
.addOption("r", "max_samples_to_remove", true, "The max number of samples that can be removed from data")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,12 @@ public class SessionConfiguration {
@Value("${dmp.tokens.retrieve_master_list.access}")
private String dmpMasterListAccess;

// DMP endpoint property keys for the ACCESS-HEME study.
// Fix: the retrieve_variants annotation was missing its closing '}'
// ("${dmp.tokens.retrieve_variants.accessheme"), so Spring would inject the
// literal placeholder text instead of the resolved property value.
@Value("${dmp.tokens.retrieve_variants.accessheme}")
private String retrieveVariantsAccessHeme;

@Value("${dmp.tokens.retrieve_master_list.accessheme}")
private String dmpMasterListAccessHeme;

private Logger log = Logger.getLogger(SessionConfiguration.class);

@Bean
Expand All @@ -106,7 +112,7 @@ public Map<String, String> retrieveVariantTokensMap() {
map.put("mskimpact_heme", retrieveVariantsHeme);
map.put("mskarcher", retrieveVariantsArcher);
map.put("mskaccess", retrieveVariantsAccess);

map.put("mskaccess_heme", retrieveVariantsAccessHeme);
return map;
}

Expand All @@ -118,7 +124,7 @@ public Map<String, String> masterListTokensMap() {
map.put("mskimpact_heme", dmpMasterListHeme);
map.put("mskarcher", dmpMasterListArcher);
map.put("mskaccess", dmpMasterListAccess);
// Fix: the master-list map must hold the master-list token for the study.
// The original put retrieveVariantsAccessHeme here (copy-paste from
// retrieveVariantTokensMap), which would send variant-endpoint tokens to
// master-list requests for mskaccess_heme.
map.put("mskaccess_heme", dmpMasterListAccessHeme);
return map;
}

Expand Down
24 changes: 24 additions & 0 deletions import-scripts/backup-redcap-data.sh
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,13 @@ MSKIMPACT_REDCAP_EXPORT_FAIL=0
HEMEPACT_REDCAP_EXPORT_FAIL=0
ARCHER_REDCAP_EXPORT_FAIL=0
ACCESS_REDCAP_EXPORT_FAIL=0
ACCESSHEME_REDCAP_EXPORT_FAIL=0

MSKIMPACT_VALIDATION_FAIL=0
HEMEPACT_VALIDATION_FAIL=0
ARCHER_VALIDATION_FAIL=0
ACCESS_VALIDATION_FAIL=0
ACCESSHEME_VALIDATION_FAIL=0

# -----------------------------------------------------------------------------------------------------------
# FUNCTIONS
Expand Down Expand Up @@ -146,6 +148,28 @@ else
fi
fi


# export and commit ACCESSHEME REDCap data
# (comment/messages previously said "ACCESS", which made log triage ambiguous
# between the mskaccess and mskaccess_heme studies)
$JAVA_BINARY $JAVA_REDCAP_PIPELINE_ARGS -e -r -s mskaccess_heme -d $ACCESSHEME_REDCAP_BACKUP
if [ $? -gt 0 ]; then
    echo "Failed to export REDCap data snapshot for ACCESSHEME! Aborting any changes made during export..."
    cd $ACCESSHEME_REDCAP_BACKUP; $GIT_BINARY checkout -- .
    ACCESSHEME_REDCAP_EXPORT_FAIL=1
    sendFailureMessageMskPipelineLogsSlack "ACCESSHEME export"
else
    validateRedcapExportForStudy $ACCESSHEME_REDCAP_BACKUP
    if [ $? -gt 0 ]; then
        echo "Validation of ACCESSHEME REDCap snapshot failed! Aborting any changes made during export..."
        ACCESSHEME_VALIDATION_FAIL=1
        cd $ACCESSHEME_REDCAP_BACKUP; $GIT_BINARY checkout -- .
        ACCESSHEME_REDCAP_EXPORT_FAIL=1
        sendFailureMessageMskPipelineLogsSlack "ACCESSHEME validation"
    else
        echo "Committing ACCESSHEME REDCap data snapshot"
        cd $ACCESSHEME_REDCAP_BACKUP; $GIT_BINARY add -A . ; $GIT_BINARY commit -m "ACCESSHEME REDCap Snapshot"
    fi
fi

# push outgoing changesets to snapshot repo
echo "Pushing REDCap snapshot back to git repository..."
echo $(date)
Expand Down
6 changes: 5 additions & 1 deletion import-scripts/cvr_dmp_endpoint_utility.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,11 +33,13 @@
MASTERLIST_ARCHER = 'dmp.tokens.retrieve_master_list.archer'
MASTERLIST_ACCESS = 'dmp.tokens.retrieve_master_list.access'
MASTERLIST_RAINDANCE = 'dmp.tokens.retrieve_master_list.rdts'
MASTERLIST_ACCESS_HEME = 'dmp.tokens.retrieve_master_list.accessheme'

RETRIEVE_VARIANTS_MSKIMPMACT = 'dmp.tokens.retrieve_variants.impact'
RETRIEVE_VARIANTS_HEMEPACT = 'dmp.tokens.retrieve_variants.heme'
RETRIEVE_VARIANTS_ARCHER = 'dmp.tokens.retrieve_variants.archer'
RETRIEVE_VARIANTS_ACCESS = 'dmp.tokens.retrieve_variants.access'
RETRIEVE_VARIANTS_ACCESS_HEME = 'dmp.tokens.retrieve_variants.accessheme'
RETRIEVE_VARIANTS_RAINDANCE = 'dmp.tokens.retrieve_variants.rdts'
RETIREVE_GML_VARIANTS = 'dmp.tokens.retrieve_gml_variants'

Expand All @@ -57,11 +59,13 @@
MASTERLIST_HEMEPACT,
MASTERLIST_ARCHER,
MASTERLIST_ACCESS,
MASTERLIST_ACCESS_HEME,
MASTERLIST_RAINDANCE,
RETRIEVE_VARIANTS_MSKIMPMACT,
RETRIEVE_VARIANTS_HEMEPACT,
RETRIEVE_VARIANTS_ARCHER,
RETRIEVE_VARIANTS_ACCESS,
RETRIEVE_VARIANTS_ACCESS_HEME,
RETRIEVE_VARIANTS_RAINDANCE,
RETIREVE_GML_VARIANTS
]
Expand All @@ -87,7 +91,7 @@

CONSUME_AFFECTED_ROWS = 'affectedRows'

DMP_STUDY_IDS = ['mskimpact', 'mskimpact_heme', 'mskraindance', 'mskarcher', 'mskaccess']
DMP_STUDY_IDS = ['mskimpact', 'mskimpact_heme', 'mskraindance', 'mskarcher', 'mskaccess', 'mskaccess_heme']
DMP_SAMPLE_ID_PATTERN = re.compile('P-\d+-(T|N)\d+-(IH|TB|TS|AH|AS|IM|XS)\d+')

MASTERLIST_CHECK_ARG_DESCRIPTION = '[optional] Fetches masterlist for study and reports samples from samples file that are missing from masterlist.'
Expand Down
19 changes: 19 additions & 0 deletions import-scripts/dmp-import-vars-functions.sh
Original file line number Diff line number Diff line change
Expand Up @@ -270,6 +270,20 @@ function import_access_ddp_to_redcap {
return $return_value
}

# Function for importing accessheme cvr files to redcap.
# Fix: the clinical file lives in the mskaccess_heme study directory
# ($MSK_ACCESSHEME_DATA_HOME), not the mskaccess one ($MSK_ACCESS_DATA_HOME)
# that was copy-pasted here — the fetch and case-list steps both use
# $MSK_ACCESSHEME_DATA_HOME for this file.
function import_accessheme_cvr_to_redcap {
    return_value=0
    if ! import_project_to_redcap $MSK_ACCESSHEME_DATA_HOME/data_clinical_mskaccess_heme_data_clinical.txt mskaccess_heme_data_clinical ; then return_value=1 ; fi
    return $return_value
}

# Function for importing accessheme supp date files to redcap.
# Fix: the supp_date file is exported into $MSK_ACCESSHEME_DATA_HOME by
# fetch-dmp-data-for-import.sh, so it must be read from there, not from
# $MSK_ACCESS_DATA_HOME (the mskaccess study directory).
function import_accessheme_supp_date_to_redcap {
    return_value=0
    if ! import_project_to_redcap $MSK_ACCESSHEME_DATA_HOME/data_clinical_mskaccess_heme_data_clinical_supp_date.txt mskaccess_heme_data_clinical_supp_date ; then return_value=1 ; fi
    return $return_value
}

# Function for removing raw clinical and timeline files from study directory
function remove_raw_clinical_timeline_data_files {
STUDY_DIRECTORY=$1
Expand Down Expand Up @@ -340,6 +354,11 @@ function consumeSamplesAfterSolidHemeImport {
$JAVA_BINARY $JAVA_CVR_FETCHER_ARGS -c $MSK_ACCESS_PRIVATE_DATA_HOME/cvr_data.json -z $drop_dead_instant_string
rm -f $MSK_ACCESS_CONSUME_TRIGGER
fi
if [ -f $MSK_ACCESSHEME_CONSUME_TRIGGER ] ; then
echo "Consuming mskaccessheme samples from cvr"
$JAVA_BINARY $JAVA_CVR_FETCHER_ARGS -c $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json -z $drop_dead_instant_string
rm -f $MSK_ACCESSHEME_CONSUME_TRIGGER
fi
}

# Function for consuming fetched samples after successful archer import
Expand Down
101 changes: 98 additions & 3 deletions import-scripts/fetch-dmp-data-for-import.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
IMPORT_STATUS_HEME=0
IMPORT_STATUS_ARCHER=0
IMPORT_STATUS_ACCESS=0
IMPORT_STATUS_ACCESSHEME=0

# Flags for ARCHER structural variants merge failure
ARCHER_MERGE_IMPACT_FAIL=0
Expand All @@ -30,12 +31,14 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
EXPORT_SUPP_DATE_HEME_FAIL=0
EXPORT_SUPP_DATE_ARCHER_FAIL=0
EXPORT_SUPP_DATE_ACCESS_FAIL=0
EXPORT_SUPP_DATE_ACCESSHEME_FAIL=0

# Assume fetchers have failed until they complete successfully
FETCH_CVR_IMPACT_FAIL=1
FETCH_CVR_HEME_FAIL=1
FETCH_CVR_ARCHER_FAIL=1
FETCH_CVR_ACCESS_FAIL=1
FETCH_CVR_ACCESSHEME_FAIL=1

UNLINKED_ARCHER_SUBSET_FAIL=0
MIXEDPACT_MERGE_FAIL=0
Expand Down Expand Up @@ -146,6 +149,13 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
sendPreImportFailureMessageMskPipelineLogsSlack "ACCESS Redcap export of mskaccess_data_clinical_supp_date"
fi

echo "exporting accessheme data_clinical_supp_date.txt from redcap"
export_project_from_redcap $MSK_ACCESSHEME_DATA_HOME mskaccess_heme_data_clinical_supp_date
if [ $? -gt 0 ] ; then
    EXPORT_SUPP_DATE_ACCESSHEME_FAIL=1
    # Fix: failure message previously said "ACCESS", which mislabels the
    # failing study in slack alerts.
    sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Redcap export of mskaccess_heme_data_clinical_supp_date"
fi

# IF WE CANCEL ANY IMPORT, LET REDCAP GET AHEAD OF CURRENCY, BUT DON'T LET THE REPOSITORY HEAD ADVANCE [REVERT]
printTimeStampedDataProcessingStepMessage "export of cvr clinical files from redcap"
echo "exporting impact data_clinical.txt from redcap"
Expand Down Expand Up @@ -176,6 +186,12 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
sendPreImportFailureMessageMskPipelineLogsSlack "ACCESS Redcap export of mskaccess_data_clinical_cvr"
fi

echo "exporting accessheme data_clinical.txt from redcap"
export_project_from_redcap $MSK_ACCESSHEME_DATA_HOME mskaccess_heme_data_clinical
if [ $? -gt 0 ] ; then
    IMPORT_STATUS_ACCESSHEME=1
    # Fix: failure message previously said "ACCESS", which mislabels the
    # failing study in slack alerts.
    sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Redcap export of mskaccess_heme_data_clinical_cvr"
fi
# -----------------------------------------------------------------------------------------------------------
# MSKIMPACT DATA FETCHES
# TODO: move other pre-import/data-fetch steps here (i.e exporting raw files from redcap)
Expand Down Expand Up @@ -396,6 +412,40 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
fi
fi

# -----------------------------------------------------------------------------------------------------------
# ACCESSHEME DATA FETCHES
printTimeStampedDataProcessingStepMessage "ACCESSHEME data processing"

if [ $IMPORT_STATUS_ACCESSHEME -eq 0 ] ; then
    # fetch new/updated accessheme samples using CVR Web service (must come after git fetching).
    drop_dead_instant_step=$(date --date="+3hours" -Iseconds) # nearly 3 hours from now
    drop_dead_instant_string=$(find_earlier_instant "$drop_dead_instant_step" "$DROP_DEAD_INSTANT_END_TO_END")
    printTimeStampedDataProcessingStepMessage "CVR fetch for accessheme"
    # -b blocks warnings for samples with zero variants (all samples will have zero variants)
    # Fix: clinical filename now follows the mskaccess_heme convention
    # (data_clinical_mskaccess_heme_data_clinical.txt) used by the redcap
    # import, case-list, and supp-date steps; the previous "mskaccessheme"
    # name was referenced by no downstream step.
    $JAVA_BINARY $JAVA_CVR_FETCHER_ARGS -d $MSK_ACCESSHEME_DATA_HOME -p $MSK_ACCESSHEME_PRIVATE_DATA_HOME -n data_clinical_mskaccess_heme_data_clinical.txt -i mskaccess_heme -s -b -r 50 $CVR_TEST_MODE_ARGS -z $drop_dead_instant_string
    if [ $? -gt 0 ] ; then
        echo "CVR ACCESSHEME fetch failed!"
        echo "This will not affect importing of mskimpact"
        cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard
        cd $DMP_PRIVATE_DATA_HOME ; $GIT_BINARY reset HEAD --hard
        sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME CVR Fetch"
        IMPORT_STATUS_ACCESSHEME=1
    else
        # check for PHI
        $PYTHON_BINARY $PORTAL_HOME/scripts/phi-scanner.py -a $PIPELINES_CONFIG_HOME/properties/fetch-cvr/phi-scanner-attributes.txt -j $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json
        if [ $? -gt 0 ] ; then
            # Fix: message previously named $MSK_ACCESS_PRIVATE_DATA_HOME,
            # but the scan above ran on the ACCESSHEME private data home.
            echo "PHI attributes found in $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json! ACCESSHEME will not be imported!"
            cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard
            cd $DMP_PRIVATE_DATA_HOME ; $GIT_BINARY reset HEAD --hard
            sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME PHI attributes scan failed on $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json"
            IMPORT_STATUS_ACCESSHEME=1
        else
            FETCH_CVR_ACCESSHEME_FAIL=0
            cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add ./* ; $GIT_BINARY commit -m "Latest ACCESSHEME dataset"
            cd $MSK_ACCESSHEME_PRIVATE_DATA_HOME ; $GIT_BINARY add ./* ; $GIT_BINARY commit -m "Latest ACCESSHEME dataset"
        fi
    fi
fi
# -----------------------------------------------------------------------------------------------------------
# GENERATE CANCER TYPE CASE LISTS AND SUPP DATE ADDED FILES
# NOTE: Even though cancer type case lists are not needed for MSKIMPACT, HEMEPACT for the portal
Expand Down Expand Up @@ -447,6 +497,18 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard
fi

# generate case lists by cancer type and add "DATE ADDED" info to clinical data for ACCESSHEME
if [ $IMPORT_STATUS_ACCESSHEME -eq 0 ] && [ $FETCH_CVR_ACCESSHEME_FAIL -eq 0 ] ; then
# TODO: double check args for this
addCancerTypeCaseLists $MSK_ACCESSHEME_DATA_HOME "mskaccess_heme" "data_clinical_mskaccess_heme_data_clinical.txt"
cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add case_lists ; $GIT_BINARY commit -m "Latest ACCESSHEME Dataset: Case Lists"
if [ $EXPORT_SUPP_DATE_ACCESSHEME_FAIL -eq 0 ] ; then
addDateAddedData $MSK_ACCESSHEME_DATA_HOME "data_clinical_mskaccess_heme_data_clinical.txt" "data_clinical_mskaccess_heme_data_clinical_supp_date.txt"
cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add data_clinical_mskaccess_heme_data_clinical_supp_date.txt ; $GIT_BINARY commit -m "Latest ACCESSHEME Dataset: SUPP DATE ADDED"
fi
cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard
fi

# -----------------------------------------------------------------------------------------------------------
# ADDITIONAL PROCESSING

Expand Down Expand Up @@ -563,6 +625,23 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
fi
fi

## ACCESSHEME imports

# imports accessheme cvr data into redcap (only runs when the ACCESSHEME CVR
# fetch earlier in this script succeeded)
if [ $FETCH_CVR_ACCESSHEME_FAIL -eq 0 ] ; then
    import_accessheme_cvr_to_redcap
    if [ $? -gt 0 ] ; then
        IMPORT_STATUS_ACCESSHEME=1
        sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME CVR Redcap Import"
    fi
    # supp-date import is additionally gated on the earlier redcap
    # supp-date export having succeeded
    if [ $EXPORT_SUPP_DATE_ACCESSHEME_FAIL -eq 0 ] ; then
        import_accessheme_supp_date_to_redcap
        if [ $? -gt 0 ] ; then
            sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Supp Date Redcap Import. Project is now empty, data restoration required"
        fi
    fi
fi

echo "Import into redcap finished"

# -------------------------------------------------------------
Expand All @@ -580,8 +659,11 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
echo "removing raw clinical & timeline files for mskaccess"
remove_raw_clinical_timeline_data_files $MSK_ACCESS_DATA_HOME

# Fix: this cleanup targets the mskaccess_heme study directory; the echo
# previously said "mskaccess" (copy-paste from the block above).
echo "removing raw clinical & timeline files for mskaccess_heme"
remove_raw_clinical_timeline_data_files $MSK_ACCESSHEME_DATA_HOME

# commit raw file cleanup - study staging directories should only contain files for portal import
$GIT_BINARY commit -m "Raw clinical and timeline file cleanup: MSKIMPACT, HEMEPACT, ARCHER, ACCESS"
$GIT_BINARY commit -m "Raw clinical and timeline file cleanup: MSKIMPACT, HEMEPACT, ARCHER, ACCESS, ACCESSHEME"

# -------------------------------------------------------------
# REDCAP EXPORTS - CBIO STAGING FORMATS
Expand Down Expand Up @@ -661,6 +743,19 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
fi
fi

printTimeStampedDataProcessingStepMessage "export of redcap data for accessheme"
if [ $IMPORT_STATUS_ACCESSHEME -eq 0 ] ; then
export_stable_id_from_redcap mskaccess_heme $MSK_ACCESSHEME_DATA_HOME
if [ $? -gt 0 ] ; then
IMPORT_STATUS_ACCESSHEME=1
cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard
sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Redcap Export"
else
touch $MSK_ACCESSHEME_CONSUME_TRIGGER
cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add * ; $GIT_BINARY commit -m "Latest ACCESSHEME Dataset: Clinical and Timeline"
fi
fi

# -------------------------------------------------------------
# UNLINKED ARCHER DATA PROCESSING
# NOTE: This processing should only occur if (1) PROCESS_UNLINKED_ARCHER_STUDY=1 and
Expand Down Expand Up @@ -734,9 +829,9 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock"
echo $(date)
fi

printTimeStampedDataProcessingStepMessage "merge of MSK-IMPACT, HEMEPACT, ACCESS data for MSKSOLIDHEME"
printTimeStampedDataProcessingStepMessage "merge of MSK-IMPACT, HEMEPACT, ACCESS, ACCESSHEME data for MSKSOLIDHEME"
# MSKSOLIDHEME merge and check exit code
$PYTHON_BINARY $PORTAL_HOME/scripts/merge.py -d $MSK_SOLID_HEME_DATA_HOME -i mskimpact -m "true" -e $MAPPED_ARCHER_SAMPLES_FILE $MSK_IMPACT_DATA_HOME $MSK_HEMEPACT_DATA_HOME $MSK_ACCESS_DATA_HOME
$PYTHON_BINARY $PORTAL_HOME/scripts/merge.py -d $MSK_SOLID_HEME_DATA_HOME -i mskimpact -m "true" -e $MAPPED_ARCHER_SAMPLES_FILE $MSK_IMPACT_DATA_HOME $MSK_HEMEPACT_DATA_HOME $MSK_ACCESS_DATA_HOME $MSK_ACCESSHEME_DATA_HOME
if [ $? -gt 0 ] ; then
echo "MSKSOLIDHEME merge failed! Study will not be updated in the portal."
echo $(date)
Expand Down
3 changes: 3 additions & 0 deletions import-scripts/pipelines_eks/automation-environment.sh
Original file line number Diff line number Diff line change
Expand Up @@ -169,12 +169,14 @@ export MSK_HEMEPACT_DATA_HOME=$DMP_DATA_HOME/mskimpact_heme
export MSK_ARCHER_DATA_HOME=$DMP_DATA_HOME/mskarcher
export MSK_ARCHER_UNFILTERED_DATA_HOME=$DMP_DATA_HOME/mskarcher_unfiltered
export MSK_ACCESS_DATA_HOME=$DMP_DATA_HOME/mskaccess
export MSK_ACCESSHEME_DATA_HOME=$DMP_DATA_HOME/mskaccess_heme
export MSK_IMPACT_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskimpact_private
export MSK_RAINDANCE_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskraindance_private
export MSK_HEMEPACT_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskimpact_heme_private
export MSK_ARCHER_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskarcher_private
export MSK_ARCHER_UNFILTERED_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskarcher_unfiltered_private
export MSK_ACCESS_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskaccess_private
export MSK_ACCESSHEME_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskaccess_heme_private
export MSK_MIXEDPACT_DATA_HOME=$DMP_DATA_HOME/mixedpact
export MSK_SOLID_HEME_DATA_HOME=$DMP_DATA_HOME/msk_solid_heme
export MSK_KINGS_DATA_HOME=$DMP_DATA_HOME/msk_kingscounty
Expand Down Expand Up @@ -205,6 +207,7 @@ export MSKIMPACT_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskimpact
export HEMEPACT_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskimpact_heme
export ARCHER_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskarcher
export ACCESS_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskaccess
export ACCESSHEME_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskaccess_heme

#######################
# environment variables used in the import-pdx-data script
Expand Down
2 changes: 1 addition & 1 deletion import-scripts/preconsume_problematic_samples.sh
Original file line number Diff line number Diff line change
Expand Up @@ -212,4 +212,4 @@ done
if need_to_log_actions ; then
log_actions
post_slack_message
fi
fi

0 comments on commit 1415361

Please sign in to comment.