diff --git a/cvr/src/main/java/org/cbioportal/cmo/pipelines/CVRPipeline.java b/cvr/src/main/java/org/cbioportal/cmo/pipelines/CVRPipeline.java index e08f1ee94..c4d88e50b 100644 --- a/cvr/src/main/java/org/cbioportal/cmo/pipelines/CVRPipeline.java +++ b/cvr/src/main/java/org/cbioportal/cmo/pipelines/CVRPipeline.java @@ -67,7 +67,7 @@ private static Options getOptions(String[] args) { .addOption("j", "json", false, "To read or not to read. This can be used alone or in combination with --gml") .addOption("g", "gml", false, "Run germline job") .addOption("s", "skipSeg", false, "Flag to skip fetching seg data") - .addOption("i", "study_id", true, "Study identifier (i.e., mskimpact, mskraindance, mskarcher, mskimpact_heme)") + .addOption("i", "study_id", true, "Study identifier (i.e., mskimpact, mskraindance, mskarcher, mskimpact_heme, mskaccess, mskaccess_heme)") .addOption("t", "test", false, "Flag for running pipeline in testing mode so that samples are not requeued or consumed") .addOption("c", "consume_samples", true, "Path to CVR json filename") .addOption("r", "max_samples_to_remove", true, "The max number of samples that can be removed from data") diff --git a/cvr/src/main/java/org/cbioportal/cmo/pipelines/cvr/SessionConfiguration.java b/cvr/src/main/java/org/cbioportal/cmo/pipelines/cvr/SessionConfiguration.java index ee8597c64..d8a864ff9 100644 --- a/cvr/src/main/java/org/cbioportal/cmo/pipelines/cvr/SessionConfiguration.java +++ b/cvr/src/main/java/org/cbioportal/cmo/pipelines/cvr/SessionConfiguration.java @@ -87,6 +87,12 @@ public class SessionConfiguration { @Value("${dmp.tokens.retrieve_master_list.access}") private String dmpMasterListAccess; + @Value("${dmp.tokens.retrieve_variants.accessheme}") + private String retrieveVariantsAccessHeme; + + @Value("${dmp.tokens.retrieve_master_list.accessheme}") + private String dmpMasterListAccessHeme; + private Logger log = Logger.getLogger(SessionConfiguration.class); @Bean @@ -106,7 +112,7 @@ public Map 
retrieveVariantTokensMap() { map.put("mskimpact_heme", retrieveVariantsHeme); map.put("mskarcher", retrieveVariantsArcher); map.put("mskaccess", retrieveVariantsAccess); - + map.put("mskaccess_heme", retrieveVariantsAccessHeme); return map; } @@ -118,7 +124,7 @@ public Map masterListTokensMap() { map.put("mskimpact_heme", dmpMasterListHeme); map.put("mskarcher", dmpMasterListArcher); map.put("mskaccess", dmpMasterListAccess); - + map.put("mskaccess_heme", dmpMasterListAccessHeme); return map; } diff --git a/import-scripts/backup-redcap-data.sh b/import-scripts/backup-redcap-data.sh index 4a917de12..b876a54d7 100755 --- a/import-scripts/backup-redcap-data.sh +++ b/import-scripts/backup-redcap-data.sh @@ -10,11 +10,13 @@ MSKIMPACT_REDCAP_EXPORT_FAIL=0 HEMEPACT_REDCAP_EXPORT_FAIL=0 ARCHER_REDCAP_EXPORT_FAIL=0 ACCESS_REDCAP_EXPORT_FAIL=0 +ACCESSHEME_REDCAP_EXPORT_FAIL=0 MSKIMPACT_VALIDATION_FAIL=0 HEMEPACT_VALIDATION_FAIL=0 ARCHER_VALIDATION_FAIL=0 ACCESS_VALIDATION_FAIL=0 +ACCESSHEME_VALIDATION_FAIL=0 # ----------------------------------------------------------------------------------------------------------- # FUNCTIONS @@ -146,6 +148,28 @@ else fi fi + +# export and commit ACCESSHEME REDCap data +$JAVA_BINARY $JAVA_REDCAP_PIPELINE_ARGS -e -r -s mskaccess_heme -d $ACCESSHEME_REDCAP_BACKUP +if [ $? -gt 0 ]; then + echo "Failed to export REDCap data snapshot for ACCESSHEME! Aborting any changes made during export..." + cd $ACCESSHEME_REDCAP_BACKUP; $GIT_BINARY checkout -- . + ACCESSHEME_REDCAP_EXPORT_FAIL=1 + sendFailureMessageMskPipelineLogsSlack "ACCESSHEME export" +else + validateRedcapExportForStudy $ACCESSHEME_REDCAP_BACKUP + if [ $? -gt 0 ]; then + echo "Validation of ACCESSHEME REDCap snapshot failed! Aborting any changes made during export..." + ACCESSHEME_VALIDATION_FAIL=1 + cd $ACCESSHEME_REDCAP_BACKUP; $GIT_BINARY checkout -- . 
+ ACCESSHEME_REDCAP_EXPORT_FAIL=1 + sendFailureMessageMskPipelineLogsSlack "ACCESSHEME validation" + else + echo "Committing ACCESSHEME REDCap data snapshot" + cd $ACCESSHEME_REDCAP_BACKUP; $GIT_BINARY add -A . ; $GIT_BINARY commit -m "ACCESSHEME REDCap Snapshot" + fi +fi + # push outgoing changesets to snapshot repo echo "Pushing REDCap snapshot back to git repository..." echo $(date) diff --git a/import-scripts/cvr_dmp_endpoint_utility.py b/import-scripts/cvr_dmp_endpoint_utility.py index f01114794..600f2827c 100644 --- a/import-scripts/cvr_dmp_endpoint_utility.py +++ b/import-scripts/cvr_dmp_endpoint_utility.py @@ -33,11 +33,13 @@ MASTERLIST_ARCHER = 'dmp.tokens.retrieve_master_list.archer' MASTERLIST_ACCESS = 'dmp.tokens.retrieve_master_list.access' MASTERLIST_RAINDANCE = 'dmp.tokens.retrieve_master_list.rdts' +MASTERLIST_ACCESS_HEME = 'dmp.tokens.retrieve_master_list.accessheme' RETRIEVE_VARIANTS_MSKIMPMACT = 'dmp.tokens.retrieve_variants.impact' RETRIEVE_VARIANTS_HEMEPACT = 'dmp.tokens.retrieve_variants.heme' RETRIEVE_VARIANTS_ARCHER = 'dmp.tokens.retrieve_variants.archer' RETRIEVE_VARIANTS_ACCESS = 'dmp.tokens.retrieve_variants.access' +RETRIEVE_VARIANTS_ACCESS_HEME = 'dmp.tokens.retrieve_variants.accessheme' RETRIEVE_VARIANTS_RAINDANCE = 'dmp.tokens.retrieve_variants.rdts' RETIREVE_GML_VARIANTS = 'dmp.tokens.retrieve_gml_variants' @@ -57,11 +59,13 @@ MASTERLIST_HEMEPACT, MASTERLIST_ARCHER, MASTERLIST_ACCESS, + MASTERLIST_ACCESS_HEME, MASTERLIST_RAINDANCE, RETRIEVE_VARIANTS_MSKIMPMACT, RETRIEVE_VARIANTS_HEMEPACT, RETRIEVE_VARIANTS_ARCHER, RETRIEVE_VARIANTS_ACCESS, + RETRIEVE_VARIANTS_ACCESS_HEME, RETRIEVE_VARIANTS_RAINDANCE, RETIREVE_GML_VARIANTS ] @@ -87,7 +91,7 @@ CONSUME_AFFECTED_ROWS = 'affectedRows' -DMP_STUDY_IDS = ['mskimpact', 'mskimpact_heme', 'mskraindance', 'mskarcher', 'mskaccess'] +DMP_STUDY_IDS = ['mskimpact', 'mskimpact_heme', 'mskraindance', 'mskarcher', 'mskaccess', 'mskaccess_heme'] DMP_SAMPLE_ID_PATTERN = 
re.compile('P-\d+-(T|N)\d+-(IH|TB|TS|AH|AS|IM|XS)\d+') MASTERLIST_CHECK_ARG_DESCRIPTION = '[optional] Fetches masterlist for study and reports samples from samples file that are missing from masterlist.' diff --git a/import-scripts/dmp-import-vars-functions.sh b/import-scripts/dmp-import-vars-functions.sh index 13602b84f..9cdf26e68 100755 --- a/import-scripts/dmp-import-vars-functions.sh +++ b/import-scripts/dmp-import-vars-functions.sh @@ -270,6 +270,20 @@ function import_access_ddp_to_redcap { return $return_value } +# Function for importing accessheme cvr files to redcap +function import_accessheme_cvr_to_redcap { + return_value=0 + if ! import_project_to_redcap $MSK_ACCESSHEME_DATA_HOME/data_clinical_mskaccess_heme_data_clinical.txt mskaccess_heme_data_clinical ; then return_value=1 ; fi + return $return_value +} + +# Function for importing accessheme supp date files to redcap +function import_accessheme_supp_date_to_redcap { + return_value=0 + if ! import_project_to_redcap $MSK_ACCESSHEME_DATA_HOME/data_clinical_mskaccess_heme_data_clinical_supp_date.txt mskaccess_heme_data_clinical_supp_date ; then return_value=1 ; fi + return $return_value +} + # Function for removing raw clinical and timeline files from study directory function remove_raw_clinical_timeline_data_files { STUDY_DIRECTORY=$1 @@ -340,6 +354,11 @@ function consumeSamplesAfterSolidHemeImport { $JAVA_BINARY $JAVA_CVR_FETCHER_ARGS -c $MSK_ACCESS_PRIVATE_DATA_HOME/cvr_data.json -z $drop_dead_instant_string rm -f $MSK_ACCESS_CONSUME_TRIGGER fi + if [ -f $MSK_ACCESSHEME_CONSUME_TRIGGER ] ; then + echo "Consuming mskaccessheme samples from cvr" + $JAVA_BINARY $JAVA_CVR_FETCHER_ARGS -c $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json -z $drop_dead_instant_string + rm -f $MSK_ACCESSHEME_CONSUME_TRIGGER + fi } # Function for consuming fetched samples after successful archer import diff --git a/import-scripts/fetch-dmp-data-for-import.sh b/import-scripts/fetch-dmp-data-for-import.sh index 3adc17fa1..1eb85c29b 100755 --- 
a/import-scripts/fetch-dmp-data-for-import.sh +++ b/import-scripts/fetch-dmp-data-for-import.sh @@ -20,6 +20,7 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" IMPORT_STATUS_HEME=0 IMPORT_STATUS_ARCHER=0 IMPORT_STATUS_ACCESS=0 + IMPORT_STATUS_ACCESSHEME=0 # Flags for ARCHER structural variants merge failure ARCHER_MERGE_IMPACT_FAIL=0 @@ -30,12 +31,14 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" EXPORT_SUPP_DATE_HEME_FAIL=0 EXPORT_SUPP_DATE_ARCHER_FAIL=0 EXPORT_SUPP_DATE_ACCESS_FAIL=0 + EXPORT_SUPP_DATE_ACCESSHEME_FAIL=0 # Assume fetchers have failed until they complete successfully FETCH_CVR_IMPACT_FAIL=1 FETCH_CVR_HEME_FAIL=1 FETCH_CVR_ARCHER_FAIL=1 FETCH_CVR_ACCESS_FAIL=1 + FETCH_CVR_ACCESSHEME_FAIL=1 UNLINKED_ARCHER_SUBSET_FAIL=0 MIXEDPACT_MERGE_FAIL=0 @@ -146,6 +149,13 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" sendPreImportFailureMessageMskPipelineLogsSlack "ACCESS Redcap export of mskaccess_data_clinical_supp_date" fi + echo "exporting accessheme data_clinical_supp_date.txt from redcap" + export_project_from_redcap $MSK_ACCESSHEME_DATA_HOME mskaccess_heme_data_clinical_supp_date + if [ $? 
-gt 0 ] ; then + EXPORT_SUPP_DATE_ACCESSHEME_FAIL=1 + sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Redcap export of mskaccess_heme_data_clinical_supp_date" + fi + # IF WE CANCEL ANY IMPORT, LET REDCAP GET AHEAD OF CURRENCY, BUT DON'T LET THE REPOSITORY HEAD ADVANCE [REVERT] printTimeStampedDataProcessingStepMessage "export of cvr clinical files from redcap" echo "exporting impact data_clinical.txt from redcap" @@ -176,6 +186,12 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" sendPreImportFailureMessageMskPipelineLogsSlack "ACCESS Redcap export of mskaccess_data_clinical_cvr" fi + echo "exporting accessheme data_clinical.txt from redcap" + export_project_from_redcap $MSK_ACCESSHEME_DATA_HOME mskaccess_heme_data_clinical + if [ $? -gt 0 ] ; then + IMPORT_STATUS_ACCESSHEME=1 + sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Redcap export of mskaccess_heme_data_clinical_cvr" + fi # ----------------------------------------------------------------------------------------------------------- # MSKIMPACT DATA FETCHES # TODO: move other pre-import/data-fetch steps here (i.e exporting raw files from redcap) @@ -396,6 +412,40 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" fi fi + # ----------------------------------------------------------------------------------------------------------- + # ACCESSHEME DATA FETCHES + printTimeStampedDataProcessingStepMessage "ACCESSHEME data processing" + + if [ $IMPORT_STATUS_ACCESSHEME -eq 0 ] ; then + # fetch new/updated accessheme samples using CVR Web service (must come after git fetching). 
+ drop_dead_instant_step=$(date --date="+3hours" -Iseconds) # nearly 3 hours from now + drop_dead_instant_string=$(find_earlier_instant "$drop_dead_instant_step" "$DROP_DEAD_INSTANT_END_TO_END") + printTimeStampedDataProcessingStepMessage "CVR fetch for accessheme" + # access has -b option to block warnings for samples with zero variants (all samples will have zero variants) + $JAVA_BINARY $JAVA_CVR_FETCHER_ARGS -d $MSK_ACCESSHEME_DATA_HOME -p $MSK_ACCESSHEME_PRIVATE_DATA_HOME -n data_clinical_mskaccess_heme_data_clinical.txt -i mskaccess_heme -s -b -r 50 $CVR_TEST_MODE_ARGS -z $drop_dead_instant_string + if [ $? -gt 0 ] ; then + echo "CVR ACCESSHEME fetch failed!" + echo "This will not affect importing of mskimpact" + cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard + cd $DMP_PRIVATE_DATA_HOME ; $GIT_BINARY reset HEAD --hard + sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME CVR Fetch" + IMPORT_STATUS_ACCESSHEME=1 + else + # check for PHI + $PYTHON_BINARY $PORTAL_HOME/scripts/phi-scanner.py -a $PIPELINES_CONFIG_HOME/properties/fetch-cvr/phi-scanner-attributes.txt -j $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json + if [ $? -gt 0 ] ; then + echo "PHI attributes found in $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json! ACCESSHEME will not be imported!" 
+ cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard + cd $DMP_PRIVATE_DATA_HOME ; $GIT_BINARY reset HEAD --hard + sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME PHI attributes scan failed on $MSK_ACCESSHEME_PRIVATE_DATA_HOME/cvr_data.json" + IMPORT_STATUS_ACCESSHEME=1 + else + FETCH_CVR_ACCESSHEME_FAIL=0 + cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add ./* ; $GIT_BINARY commit -m "Latest ACCESSHEME dataset" + cd $MSK_ACCESSHEME_PRIVATE_DATA_HOME ; $GIT_BINARY add ./* ; $GIT_BINARY commit -m "Latest ACCESSHEME dataset" + fi + fi + fi # ----------------------------------------------------------------------------------------------------------- # GENERATE CANCER TYPE CASE LISTS AND SUPP DATE ADDED FILES # NOTE: Even though cancer type case lists are not needed for MSKIMPACT, HEMEPACT for the portal @@ -447,6 +497,18 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard fi + # generate case lists by cancer type and add "DATE ADDED" info to clinical data for ACCESSHEME + if [ $IMPORT_STATUS_ACCESSHEME -eq 0 ] && [ $FETCH_CVR_ACCESSHEME_FAIL -eq 0 ] ; then + # TODO: double check args for this + addCancerTypeCaseLists $MSK_ACCESSHEME_DATA_HOME "mskaccess_heme" "data_clinical_mskaccess_heme_data_clinical.txt" + cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add case_lists ; $GIT_BINARY commit -m "Latest ACCESSHEME Dataset: Case Lists" + if [ $EXPORT_SUPP_DATE_ACCESSHEME_FAIL -eq 0 ] ; then + addDateAddedData $MSK_ACCESSHEME_DATA_HOME "data_clinical_mskaccess_heme_data_clinical.txt" "data_clinical_mskaccess_heme_data_clinical_supp_date.txt" + cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add data_clinical_mskaccess_heme_data_clinical_supp_date.txt ; $GIT_BINARY commit -m "Latest ACCESSHEME Dataset: SUPP DATE ADDED" + fi + cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard + fi + # ----------------------------------------------------------------------------------------------------------- # 
ADDITIONAL PROCESSING @@ -563,6 +625,23 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" fi fi + ## ACCESSHEME imports + + # imports accessheme cvr data into redcap + if [ $FETCH_CVR_ACCESSHEME_FAIL -eq 0 ] ; then + import_accessheme_cvr_to_redcap + if [ $? -gt 0 ] ; then + IMPORT_STATUS_ACCESSHEME=1 + sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME CVR Redcap Import" + fi + if [ $EXPORT_SUPP_DATE_ACCESSHEME_FAIL -eq 0 ] ; then + import_accessheme_supp_date_to_redcap + if [ $? -gt 0 ] ; then + sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Supp Date Redcap Import. Project is now empty, data restoration required" + fi + fi + fi + echo "Import into redcap finished" # ------------------------------------------------------------- @@ -580,8 +659,11 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" echo "removing raw clinical & timeline files for mskaccess" remove_raw_clinical_timeline_data_files $MSK_ACCESS_DATA_HOME + echo "removing raw clinical & timeline files for mskaccess_heme" + remove_raw_clinical_timeline_data_files $MSK_ACCESSHEME_DATA_HOME + # commit raw file cleanup - study staging directories should only contain files for portal import - $GIT_BINARY commit -m "Raw clinical and timeline file cleanup: MSKIMPACT, HEMEPACT, ARCHER, ACCESS" + $GIT_BINARY commit -m "Raw clinical and timeline file cleanup: MSKIMPACT, HEMEPACT, ARCHER, ACCESS, ACCESSHEME" # ------------------------------------------------------------- # REDCAP EXPORTS - CBIO STAGING FORMATS @@ -661,6 +743,19 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" fi fi + printTimeStampedDataProcessingStepMessage "export of redcap data for accessheme" + if [ $IMPORT_STATUS_ACCESSHEME -eq 0 ] ; then + export_stable_id_from_redcap mskaccess_heme $MSK_ACCESSHEME_DATA_HOME + if [ $? 
-gt 0 ] ; then + IMPORT_STATUS_ACCESSHEME=1 + cd $DMP_DATA_HOME ; $GIT_BINARY reset HEAD --hard + sendPreImportFailureMessageMskPipelineLogsSlack "ACCESSHEME Redcap Export" + else + touch $MSK_ACCESSHEME_CONSUME_TRIGGER + cd $MSK_ACCESSHEME_DATA_HOME ; $GIT_BINARY add * ; $GIT_BINARY commit -m "Latest ACCESSHEME Dataset: Clinical and Timeline" + fi + fi + # ------------------------------------------------------------- # UNLINKED ARCHER DATA PROCESSING # NOTE: This processing should only occur if (1) PROCESS_UNLINKED_ARCHER_STUDY=1 and @@ -734,9 +829,9 @@ MY_FLOCK_FILEPATH="/data/portal-cron/cron-lock/fetch-dmp-data-for-import.lock" echo $(date) fi - printTimeStampedDataProcessingStepMessage "merge of MSK-IMPACT, HEMEPACT, ACCESS data for MSKSOLIDHEME" + printTimeStampedDataProcessingStepMessage "merge of MSK-IMPACT, HEMEPACT, ACCESS, ACCESSHEME data for MSKSOLIDHEME" # MSKSOLIDHEME merge and check exit code - $PYTHON_BINARY $PORTAL_HOME/scripts/merge.py -d $MSK_SOLID_HEME_DATA_HOME -i mskimpact -m "true" -e $MAPPED_ARCHER_SAMPLES_FILE $MSK_IMPACT_DATA_HOME $MSK_HEMEPACT_DATA_HOME $MSK_ACCESS_DATA_HOME + $PYTHON_BINARY $PORTAL_HOME/scripts/merge.py -d $MSK_SOLID_HEME_DATA_HOME -i mskimpact -m "true" -e $MAPPED_ARCHER_SAMPLES_FILE $MSK_IMPACT_DATA_HOME $MSK_HEMEPACT_DATA_HOME $MSK_ACCESS_DATA_HOME $MSK_ACCESSHEME_DATA_HOME if [ $? -gt 0 ] ; then echo "MSKSOLIDHEME merge failed! Study will not be updated in the portal." 
echo $(date) diff --git a/import-scripts/pipelines_eks/automation-environment.sh b/import-scripts/pipelines_eks/automation-environment.sh index e9e16e0ed..e4ef09537 100755 --- a/import-scripts/pipelines_eks/automation-environment.sh +++ b/import-scripts/pipelines_eks/automation-environment.sh @@ -169,12 +169,14 @@ export MSK_HEMEPACT_DATA_HOME=$DMP_DATA_HOME/mskimpact_heme export MSK_ARCHER_DATA_HOME=$DMP_DATA_HOME/mskarcher export MSK_ARCHER_UNFILTERED_DATA_HOME=$DMP_DATA_HOME/mskarcher_unfiltered export MSK_ACCESS_DATA_HOME=$DMP_DATA_HOME/mskaccess +export MSK_ACCESSHEME_DATA_HOME=$DMP_DATA_HOME/mskaccess_heme export MSK_IMPACT_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskimpact_private export MSK_RAINDANCE_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskraindance_private export MSK_HEMEPACT_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskimpact_heme_private export MSK_ARCHER_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskarcher_private export MSK_ARCHER_UNFILTERED_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskarcher_unfiltered_private export MSK_ACCESS_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskaccess_private +export MSK_ACCESSHEME_PRIVATE_DATA_HOME=$DMP_PRIVATE_DATA_HOME/mskaccess_heme_private export MSK_MIXEDPACT_DATA_HOME=$DMP_DATA_HOME/mixedpact export MSK_SOLID_HEME_DATA_HOME=$DMP_DATA_HOME/msk_solid_heme export MSK_KINGS_DATA_HOME=$DMP_DATA_HOME/msk_kingscounty @@ -205,6 +207,7 @@ export MSKIMPACT_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskimpact export HEMEPACT_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskimpact_heme export ARCHER_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskarcher export ACCESS_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskaccess +export ACCESSHEME_REDCAP_BACKUP=$REDCAP_BACKUP_DATA_HOME/mskaccess_heme ####################### # environment variables used in the import-pdx-data script diff --git a/import-scripts/preconsume_problematic_samples.sh b/import-scripts/preconsume_problematic_samples.sh index fffb63fb7..8444238b5 100755 --- 
a/import-scripts/preconsume_problematic_samples.sh +++ b/import-scripts/preconsume_problematic_samples.sh @@ -212,4 +212,4 @@ done if need_to_log_actions ; then log_actions post_slack_message -fi \ No newline at end of file +fi