diff --git a/.secrets.baseline b/.secrets.baseline
index 914a812a..87ff194d 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -3,7 +3,7 @@
     "files": "go.sum|package-lock.json|^.secrets.baseline$",
     "lines": null
   },
-  "generated_at": "2024-10-11T12:56:06Z",
+  "generated_at": "2024-11-13T21:58:34Z",
   "plugins_used": [
     {
       "name": "AWSKeyDetector"
@@ -100,7 +100,7 @@
         "hashed_secret": "5996c731c43c6191af2324af0230bd65e723fcdb",
         "is_secret": false,
         "is_verified": false,
-        "line_number": 357,
+        "line_number": 358,
         "type": "Secret Keyword",
         "verified_result": null
       },
@@ -108,7 +108,7 @@
         "hashed_secret": "03e60e3e0d9675b19754e2a81bbb48a26af858e7",
         "is_secret": false,
         "is_verified": false,
-        "line_number": 825,
+        "line_number": 826,
         "type": "Secret Keyword",
         "verified_result": null
       }
@@ -128,7 +128,7 @@
         "hashed_secret": "892bd503fb45f6fcafb1c7003d88291fc0b20208",
         "is_secret": false,
         "is_verified": false,
-        "line_number": 270,
+        "line_number": 284,
         "type": "Secret Keyword",
         "verified_result": null
       },
@@ -136,7 +136,7 @@
         "hashed_secret": "5da5a31d49370df43eff521b39c10db1466fae44",
         "is_secret": false,
         "is_verified": false,
-        "line_number": 273,
+        "line_number": 287,
         "type": "Secret Keyword",
         "verified_result": null
       },
@@ -144,7 +144,7 @@
         "hashed_secret": "d4c3d66fd0c38547a3c7a4c6bdc29c36911bc030",
         "is_secret": false,
         "is_verified": false,
-        "line_number": 463,
+        "line_number": 488,
         "type": "Secret Keyword",
         "verified_result": null
       }
@@ -216,7 +216,7 @@
         "hashed_secret": "b4e929aa58c928e3e44d12e6f873f39cd8207a25",
         "is_secret": false,
         "is_verified": false,
-        "line_number": 666,
+        "line_number": 520,
         "type": "Secret Keyword",
         "verified_result": null
       },
@@ -224,7 +224,7 @@
         "hashed_secret": "16282376ddaaaf2bf60be9041a7504280f3f338b",
         "is_secret": false,
         "is_verified": false,
-        "line_number": 679,
+        "line_number": 533,
         "type": "Secret Keyword",
         "verified_result": null
       }
diff --git a/cloudinfo/projects.go b/cloudinfo/projects.go
index d80e9339..385e4603 100644
--- a/cloudinfo/projects.go
+++ b/cloudinfo/projects.go
@@ -3,8 +3,6 @@ package cloudinfo
 import (
 	"encoding/json"
 	"fmt"
-	"github.com/IBM/go-sdk-core/v5/core"
-	project "github.com/IBM/project-go-sdk/projectv1"
 	"log"
 	"math/rand"
 	"os"
@@ -13,6 +11,9 @@ import (
 	"sort"
 	"strconv"
 	"strings"
+
+	"github.com/IBM/go-sdk-core/v5/core"
+	project "github.com/IBM/project-go-sdk/projectv1"
 )
 
 // CreateProjectFromConfig creates a project with the given config
@@ -900,9 +901,13 @@ func (infoSvc *CloudInfoService) LookupMemberNameByID(stackDetails *project.Proj
 }
 
 // GetSchematicsJobLogsForMember gets the schematics job logs for a member
-func (infoSvc *CloudInfoService) GetSchematicsJobLogsForMember(member *project.ProjectConfig, memberName string) (details string, terraformLogs string) {
+func (infoSvc *CloudInfoService) GetSchematicsJobLogsForMember(member *project.ProjectConfig, memberName string, projectRegion string) (details string, terraformLogs string) {
 	var logMessage strings.Builder
 	var terraformLogMessage strings.Builder
+
+	// determine the schematics geo location from the project region (e.g. "us-south" -> "us")
+	schematicsLocation := projectRegion[0:2]
+
 	logMessage.WriteString(fmt.Sprintf("Schematics job logs for member: %s", memberName))
 
 	if member.Schematics != nil && member.Schematics.WorkspaceCrn != nil {
@@ -935,7 +940,7 @@ func (infoSvc *CloudInfoService) GetSchematicsJobLogsForMember(member *project.P
 					jobURL := strings.Split(url, "/jobs?region=")[0]
 					jobURL = fmt.Sprintf("%s/log/%s", jobURL, jobID)
 					logMessage.WriteString(fmt.Sprintf("\nSchematics Job URL: %s", jobURL))
-					logs, errGetLogs := infoSvc.GetSchematicsJobLogsText(jobID)
+					logs, errGetLogs := infoSvc.GetSchematicsJobLogsText(jobID, schematicsLocation)
 					if errGetLogs != nil {
 						terraformLogMessage.WriteString(fmt.Sprintf("\nError getting job logs for Job ID: %s member: %s, error: %s", jobID, memberName, errGetLogs))
 					} else {
@@ -988,7 +993,7 @@ func (infoSvc *CloudInfoService) GetSchematicsJobLogsForMember(member *project.P
 				jobURL := strings.Split(url, "/jobs?region=")[0]
 				jobURL = fmt.Sprintf("%s/log/%s", jobURL, jobID)
 				logMessage.WriteString(fmt.Sprintf("\nSchematics Job URL: %s", jobURL))
-				logs, errGetLogs := infoSvc.GetSchematicsJobLogsText(jobID)
+				logs, errGetLogs := infoSvc.GetSchematicsJobLogsText(jobID, schematicsLocation)
 				if errGetLogs != nil {
 					terraformLogMessage.WriteString(fmt.Sprintf("\nError getting job logs for Job ID: %s member: %s, error: %s", jobID, memberName, errGetLogs))
 				} else {
@@ -1040,7 +1045,7 @@ func (infoSvc *CloudInfoService) GetSchematicsJobLogsForMember(member *project.P
 				jobURL := strings.Split(url, "/jobs?region=")[0]
 				jobURL = fmt.Sprintf("%s/log/%s", jobURL, jobID)
 				logMessage.WriteString(fmt.Sprintf("\nSchematics Job URL: %s", jobURL))
-				logs, errGetLogs := infoSvc.GetSchematicsJobLogsText(jobID)
+				logs, errGetLogs := infoSvc.GetSchematicsJobLogsText(jobID, schematicsLocation)
 				if errGetLogs != nil {
 					terraformLogMessage.WriteString(fmt.Sprintf("\nError getting job logs for Job ID: %s member: %s, error: %s", jobID, memberName, errGetLogs))
 				} else {
diff --git a/cloudinfo/schematics.go b/cloudinfo/schematics.go
index f81ed014..b4a3be0b 100644
--- a/cloudinfo/schematics.go
+++ b/cloudinfo/schematics.go
@@ -2,16 +2,29 @@ package cloudinfo
 
 import (
 	"fmt"
+	"math/rand"
+	"net/url"
+	"strings"
+
 	"github.com/IBM/go-sdk-core/v5/core"
 	schematics "github.com/IBM/schematics-go-sdk/schematicsv1"
-	"io"
-	"net/http"
-	"strings"
-	"time"
+	"github.com/IBM/vpc-go-sdk/common"
 )
 
-func (infoSvc *CloudInfoService) GetSchematicsJobLogs(jobID string) (result *schematics.JobLog, response *core.DetailedResponse, err error) {
-	return infoSvc.schematicsService.ListJobLogs(
+// GetSchematicsServiceByLocation returns a previously configured schematics service for the given location.
+// An error is returned if the location was not initialized.
+// The location must be a valid geographical location supported by schematics: "us" or "eu".
+func (infoSvc *CloudInfoService) GetSchematicsServiceByLocation(location string) (schematicsService, error) {
+	service, isFound := infoSvc.schematicsServices[location]
+	if !isFound {
+		return nil, fmt.Errorf("could not find Schematics Service for location %s", location)
+	}
+
+	return service, nil
+}
+
+func (infoSvc *CloudInfoService) GetSchematicsJobLogs(jobID string, location string) (result *schematics.JobLog, response *core.DetailedResponse, err error) {
+	svc, svcErr := infoSvc.GetSchematicsServiceByLocation(location)
+	if svcErr != nil {
+		return nil, nil, svcErr
+	}
+
+	return svc.ListJobLogs(
 		&schematics.ListJobLogsOptions{
 			JobID: core.StringPtr(jobID),
 		},
@@ -21,56 +34,120 @@ func (infoSvc *CloudInfoService) GetSchematicsJobLogs(jobID string) (result *sch
 // GetSchematicsJobLogsText retrieves the logs of a Schematics job as a string
 // The logs are returned as a string, or an error if the operation failed
 // This is a temporary workaround until the Schematics GO SDK is fixed, ListJobLogs is broken as the response is text/plain and not application/json
-func (infoSvc *CloudInfoService) GetSchematicsJobLogsText(jobID string) (logs string, err error) {
-	const maxRetries = 3
-	const retryDelay = 2 * time.Second
-
-	url := fmt.Sprintf("https://schematics.cloud.ibm.com/v2/jobs/%s/logs", jobID)
-	var retryErrors []string
-
-	for attempt := 1; attempt <= maxRetries; attempt++ {
-		// Create the request
-		req, err := http.NewRequest("GET", url, nil)
-		if err != nil {
-			return "", fmt.Errorf("failed to create request: %v", err)
-		}
-
-		// Authenticate the request
-		err = infoSvc.authenticator.Authenticate(req)
-		if err != nil {
-			return "", fmt.Errorf("failed to authenticate: %v", err)
-		}
-
-		// Make the request
-		client := &http.Client{}
-		resp, err := client.Do(req)
-		if err != nil {
-			retryErrors = append(retryErrors, fmt.Sprintf("attempt %d: failed to make request: %v", attempt, err))
-			if attempt < maxRetries {
-				time.Sleep(retryDelay)
-				continue
-			}
-			return "", fmt.Errorf("exceeded maximum retries, attempt failures:\n%s", strings.Join(retryErrors, "\n"))
-		}
-		defer resp.Body.Close()
-
-		// Check if the response status is successful
-		if resp.StatusCode >= 200 && resp.StatusCode < 300 {
-			// Read the response body
-			body, err := io.ReadAll(resp.Body)
-			if err != nil {
-				return "", fmt.Errorf("failed to read response body: %v", err)
-			}
-			return string(body), nil
-		} else {
-			retryErrors = append(retryErrors, fmt.Sprintf("attempt %d: request failed with status code: %d", attempt, resp.StatusCode))
-			if attempt < maxRetries {
-				time.Sleep(retryDelay)
-				continue
-			}
-			return "", fmt.Errorf("exceeded maximum retries, attempt failures:\n%s", strings.Join(retryErrors, "\n"))
-		}
+// location must be a valid geographical location supported by schematics: "us" or "eu"
+func (infoSvc *CloudInfoService) GetSchematicsJobLogsText(jobID string, location string) (string, error) {
+
+	svc, svcErr := infoSvc.GetSchematicsServiceByLocation(location)
+	if svcErr != nil {
+		return "", fmt.Errorf("error getting schematics service for location %s: %w", location, svcErr)
+	}
+
+	// build up a REST API call for job logs
+	pathParamsMap := map[string]string{
+		"job_id": jobID,
+	}
+	builder := core.NewRequestBuilder(core.GET)
+	builder.EnableGzipCompression = svc.GetEnableGzipCompression()
+	_, builderErr := builder.ResolveRequestURL(svc.GetServiceURL(), `/v2/jobs/{job_id}/logs`, pathParamsMap)
+	if builderErr != nil {
+		return "", builderErr
 	}
+	sdkHeaders := common.GetSdkHeaders("schematics", "V1", "ListJobLogs")
+	for headerName, headerValue := range sdkHeaders {
+		builder.AddHeader(headerName, headerValue)
+	}
+	builder.AddHeader("Accept", "application/json")
+
+	request, buildErr := builder.Build()
+	if buildErr != nil {
+		return "", buildErr
+	}
+
+	// initialize the IBM Core HTTP service
+	baseService, baseSvcErr := core.NewBaseService(&core.ServiceOptions{
+		URL:           svc.GetServiceURL(),
+		Authenticator: infoSvc.authenticator,
+	})
+	if baseSvcErr != nil {
+		return "", baseSvcErr
+	}
+
+	// make the builder request call on the core http service, which is text/plain
+	// using response type "**string" to get raw text output
+	rawResponse := core.StringPtr("")
+	_, requestErr := baseService.Request(request, &rawResponse)
+	if requestErr != nil {
+		return "", requestErr
+	}
+
+	return *rawResponse, nil
+}
+
+// GetSchematicsJobFileData will download a specific job file and return a JobFileData structure.
+// Allowable values for fileType: state_file, plan_json
+// location must be a valid geographical location supported by schematics: "us" or "eu"
+func (infoSvc *CloudInfoService) GetSchematicsJobFileData(jobID string, fileType string, location string) (*schematics.JobFileData, error) {
+	// setup options
+	// file type Allowable values: [template_repo,readme_file,log_file,state_file,plan_json]
+	jobFileOptions := &schematics.GetJobFilesOptions{
+		JobID:    core.StringPtr(jobID),
+		FileType: core.StringPtr(fileType),
+	}
+
+	// get a service based on location
+	svc, svcErr := infoSvc.GetSchematicsServiceByLocation(location)
+	if svcErr != nil {
+		return nil, fmt.Errorf("error getting schematics service for location %s: %w", location, svcErr)
+	}
+
+	data, _, err := svc.GetJobFiles(jobFileOptions)
+
+	return data, err
+}
+
+// GetSchematicsJobPlanJson returns the raw Terraform plan JSON produced by a schematics job as a string
+// location must be a valid geographical location supported by schematics: "us" or "eu"
+func (infoSvc *CloudInfoService) GetSchematicsJobPlanJson(jobID string, location string) (string, error) {
+	// get the plan_json file for the job
+	data, dataErr := infoSvc.GetSchematicsJobFileData(jobID, "plan_json", location)
+
+	// check for multiple error conditions
+	if dataErr != nil {
+		return "", dataErr
+	}
+	if data == nil {
+		return "", fmt.Errorf("job file data object is nil, which is unexpected")
+	}
+	if data.FileContent == nil {
+		return "", fmt.Errorf("file content is nil, which is unexpected")
+	}
+
+	// extract the plan file content and return
+	contentPtr := data.FileContent
+
+	return *contentPtr, nil
+}
+
+// GetRandomSchematicsLocation returns a randomly selected location that is valid for Schematics workspace creation
+func GetRandomSchematicsLocation() string {
+	validLocations := GetSchematicsLocations()
+	randomIndex := rand.Intn(len(validLocations))
+	return validLocations[randomIndex]
+}
+
+// GetSchematicServiceURLForRegion returns the appropriate schematics API endpoint for a given region.
+// The region can be geographic ("us" or "eu") or specific ("us-south").
+func GetSchematicServiceURLForRegion(region string) (string, error) {
+	// the service URL is simply the region prefixed onto the default base URL
+
+	// first, get the default URL from official service
+	url, parseErr := url.Parse(schematics.DefaultServiceURL)
+	if parseErr != nil {
+		return "", fmt.Errorf("error parsing default schematics URL: %w", parseErr)
+	}
+
+	// prefix the region in front of existing host
+	url.Host = strings.ToLower(region) + "." + url.Host
 
-	return "", fmt.Errorf("exceeded maximum retries, attempt failures:\n%s", strings.Join(retryErrors, "\n"))
+	return url.String(), nil
 }
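
Reviewer note: a minimal usage sketch (not part of the patch) for the two exported helpers added above. It uses only `GetRandomSchematicsLocation` and `GetSchematicServiceURLForRegion` from this file; the printed endpoint is illustrative of the region-prefixing behavior, not an authoritative URL.

```go
package main

import (
	"fmt"

	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/cloudinfo"
)

func main() {
	// pick one of the supported Schematics geo locations at random ("us" or "eu")
	location := cloudinfo.GetRandomSchematicsLocation()

	// the regional endpoint is the geo prefixed onto the default host,
	// e.g. "us" would yield something like https://us.schematics.cloud.ibm.com
	endpoint, err := cloudinfo.GetSchematicServiceURLForRegion(location)
	if err != nil {
		fmt.Println("error building schematics endpoint:", err)
		return
	}
	fmt.Printf("schematics endpoint for %q: %s\n", location, endpoint)
}
```
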
diff --git a/cloudinfo/service.go b/cloudinfo/service.go
index 7d2b999a..d01ecea1 100644
--- a/cloudinfo/service.go
+++ b/cloudinfo/service.go
@@ -3,11 +3,13 @@ package cloudinfo
 
 import (
 	"errors"
-	schematics "github.com/IBM/schematics-go-sdk/schematicsv1"
+	"fmt"
 	"log"
 	"os"
 	"sync"
 
+	schematics "github.com/IBM/schematics-go-sdk/schematicsv1"
+
 	"github.com/IBM/platform-services-go-sdk/catalogmanagementv1"
 	projects "github.com/IBM/project-go-sdk/projectv1"
 
@@ -45,8 +47,9 @@ type CloudInfoService struct {
 	lock                   sync.Mutex
 	icdService             icdService
 	projectsService        projectsService
-	schematicsService      schematicsService
-	ApiKey                 string
+	// schematics is regional; this map contains schematics services keyed by location
+	schematicsServices map[string]schematicsService
+	ApiKey             string
 }
 
 // interface for the cloudinfo service (can be mocked in tests)
@@ -80,10 +83,13 @@ type CloudInfoServiceI interface {
 	GetStackMembers(stackConfig *ConfigDetails) ([]*projects.ProjectConfig, error)
 	SyncConfig(projectID string, configID string) (response *core.DetailedResponse, err error)
 	LookupMemberNameByID(stackDetails *projects.ProjectConfig, memberID string) (string, error)
-	GetSchematicsJobLogs(jobID string) (result *schematics.JobLog, response *core.DetailedResponse, err error)
-	GetSchematicsJobLogsText(jobID string) (logs string, err error)
+	GetSchematicsJobLogs(jobID string, location string) (result *schematics.JobLog, response *core.DetailedResponse, err error)
+	GetSchematicsJobLogsText(jobID string, location string) (logs string, err error)
 	ArePipelineActionsRunning(stackConfig *ConfigDetails) (bool, error)
-	GetSchematicsJobLogsForMember(member *projects.ProjectConfig, memberName string) (string, string)
+	GetSchematicsJobLogsForMember(member *projects.ProjectConfig, memberName string, projectRegion string) (string, string)
+	GetSchematicsJobFileData(jobID string, fileType string, location string) (*schematics.JobFileData, error)
+	GetSchematicsJobPlanJson(jobID string, location string) (string, error)
+	GetSchematicsServiceByLocation(location string) (schematicsService, error)
 }
 
 // CloudInfoServiceOptions structure used as input params for service constructor.
@@ -101,7 +107,7 @@ type CloudInfoServiceOptions struct {
 	IcdService                icdService
 	ProjectsService           projectsService
 	CatalogService            catalogService
-	SchematicsService         schematicsService
+	SchematicsServices        map[string]schematicsService
 	// StackDefinitionCreator is used to create stack definitions and only added to support testing/mocking
 	StackDefinitionCreator StackDefinitionCreator
 }
@@ -208,6 +214,9 @@ type catalogService interface {
 // schematicsService for external Schematics V1 Service API. Used for mocking.
 type schematicsService interface {
 	ListJobLogs(listJobLogsOptions *schematics.ListJobLogsOptions) (result *schematics.JobLog, response *core.DetailedResponse, err error)
+	GetJobFiles(getJobFilesOptions *schematics.GetJobFilesOptions) (result *schematics.JobFileData, response *core.DetailedResponse, err error)
+	GetEnableGzipCompression() bool
+	GetServiceURL() string
 }
 
 // ReplaceCBRRule replaces a CBR rule using the provided options.
@@ -250,6 +259,11 @@ func (regions SortedRegionsDataByPriority) Swap(i, j int) {
 	regions[i], regions[j] = regions[j], regions[i]
 }
 
+// GetSchematicsLocations returns the constant list of locations supported by the schematics service
+func GetSchematicsLocations() []string {
+	return []string{"us", "eu"}
+}
+
 // NewCloudInfoServiceWithKey is a factory function used for creating a new initialized service structure.
 // This function can be called if an IBM Cloud API Key is known and passed in directly.
 // Returns a pointer to an initialized CloudInfoService and error.
@@ -429,19 +443,30 @@ func NewCloudInfoServiceWithKey(options CloudInfoServiceOptions) (*CloudInfoServ
 
 	}
 
-	if options.SchematicsService != nil {
-		infoSvc.schematicsService = options.SchematicsService
+	// Schematics is a regional endpoint service, and cross-location API calls do not work.
+	// Here we will set up multiple services for the known geographic locations (US and EU)
+	if options.SchematicsServices != nil {
+		infoSvc.schematicsServices = options.SchematicsServices
 	} else {
-		schematicsClient, schematicsErr := schematics.NewSchematicsV1(&schematics.SchematicsV1Options{
-			Authenticator: infoSvc.authenticator,
-		})
-		if schematicsErr != nil {
-			log.Println("Error creating schematics client:", schematicsErr)
-			return nil, schematicsErr
-		}
+		infoSvc.schematicsServices = make(map[string]schematicsService)
+		for _, schematicsLocation := range GetSchematicsLocations() {
+			schematicsUrl, schematicsUrlErr := GetSchematicServiceURLForRegion(schematicsLocation)
+			if schematicsUrlErr != nil {
+				return nil, fmt.Errorf("error determining Schematics URL: %w", schematicsUrlErr)
+			}
+			schematicsClient, schematicsErr := schematics.NewSchematicsV1(&schematics.SchematicsV1Options{
+				Authenticator: infoSvc.authenticator,
+				URL:           schematicsUrl,
+			})
+			if schematicsErr != nil {
+				log.Println("Error creating schematics client:", schematicsErr)
+				return nil, fmt.Errorf("error creating schematics client: %w", schematicsErr)
+			}
 
-		infoSvc.schematicsService = schematicsClient
+			infoSvc.schematicsServices[schematicsLocation] = schematicsClient
+		}
 	}
+
 	if options.StackDefinitionCreator != nil {
 		infoSvc.stackDefinitionCreator = options.StackDefinitionCreator
 	} else {
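
Reviewer note: a hedged sketch of how a caller might combine the new per-location plumbing with a project region, mirroring what `GetSchematicsJobLogsForMember` does internally. The helper name `printJobPlan` and package name are hypothetical; it assumes an already initialized service satisfying `CloudInfoServiceI`, an existing Schematics job, and a well-formed region string.

```go
package example

import (
	"fmt"

	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/cloudinfo"
)

// printJobPlan is a hypothetical helper: infoSvc is assumed to be an already
// initialized service (anything satisfying cloudinfo.CloudInfoServiceI), jobID an
// existing Schematics job, and projectRegion a well-formed region such as "us-south".
func printJobPlan(infoSvc cloudinfo.CloudInfoServiceI, jobID string, projectRegion string) error {
	// the geo location is the first two characters of the project region,
	// e.g. "us-south" -> "us", "eu-de" -> "eu"
	location := projectRegion[0:2]

	planJson, err := infoSvc.GetSchematicsJobPlanJson(jobID, location)
	if err != nil {
		return fmt.Errorf("could not fetch plan JSON for job %s: %w", jobID, err)
	}

	fmt.Println(planJson)
	return nil
}
```
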
diff --git a/common/json_helpers.go b/common/json_helpers.go
index b297ee0b..9fab49c5 100644
--- a/common/json_helpers.go
+++ b/common/json_helpers.go
@@ -152,23 +152,26 @@ func sanitizeJSON(data interface{}, secureList map[string]interface{}) {
 	switch v := data.(type) {
 	case map[string]interface{}:
 		for key := range v {
-			if _, ok := secureList[key]; ok {
-				// Generate a random salt value
-				salt := make([]byte, 16) // You can choose the salt length as needed
-				_, err := rand.Read(salt)
-				if err != nil {
-					fmt.Println("Error generating salt:", err)
-					return
+			// NOTE: the before_sensitive/after_sensitive/after_unknown sections do not contain real values, only booleans denoting sensitivity, so skip them
+			if key != "before_sensitive" && key != "after_sensitive" && key != "after_unknown" {
+				if _, ok := secureList[key]; ok {
+					// Generate a random salt value
+					salt := make([]byte, 16) // You can choose the salt length as needed
+					_, err := rand.Read(salt)
+					if err != nil {
+						fmt.Println("Error generating salt:", err)
+						return
+					}
+
+					// Concatenate the salt and input
+					saltedInput := append(salt, []byte(fmt.Sprintf("%v", v[key]))...)
+					// Replace sensitive values with SANITIZE_STRING+Hash of the value.
+					hashedValue := sha256.Sum224(saltedInput)
+					v[key] = SANITIZE_STRING + fmt.Sprintf("-%x", hashedValue)
+				} else {
+					// Recursively sanitize nested data.
+					sanitizeJSON(v[key], secureList)
 				}
-
-				// Concatenate the salt and input
-				saltedInput := append(salt, []byte(fmt.Sprintf("%v", v[key]))...)
-				// Replace sensitive values with SANITIZE_STRING+Hash of the value.
-				hashedValue := sha256.Sum224(saltedInput)
-				v[key] = SANITIZE_STRING + fmt.Sprintf("-%x", hashedValue)
-			} else {
-				// Recursively sanitize nested data.
-				sanitizeJSON(v[key], secureList)
 			}
 		}
 	case []interface{}:
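
Reviewer note: a small, self-contained sketch (not the package's actual helper, which is unexported) illustrating the skip-list idea introduced above: keys that only carry sensitivity markers are left untouched, while matching secret keys elsewhere are redacted. The `redact` function and sample JSON are illustrative only.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// redact is a simplified stand-in for sanitizeJSON: real values under keys in the
// secure list are replaced, while the plan's marker-only sections are skipped.
func redact(data interface{}, secure map[string]interface{}) {
	m, ok := data.(map[string]interface{})
	if !ok {
		return
	}
	for key := range m {
		// these sections hold booleans/placeholders describing sensitivity, not values
		if key == "before_sensitive" || key == "after_sensitive" || key == "after_unknown" {
			continue
		}
		if _, sensitive := secure[key]; sensitive {
			m[key] = "REDACTED"
		} else {
			redact(m[key], secure)
		}
	}
}

func main() {
	raw := `{"password":"p@ss","before_sensitive":{"password":true},"nested":{"password":"p@ss2"}}`
	var doc map[string]interface{}
	if err := json.Unmarshal([]byte(raw), &doc); err != nil {
		panic(err)
	}
	redact(doc, map[string]interface{}{"password": true})
	out, _ := json.Marshal(doc)
	// the two real password values are redacted; before_sensitive keeps its boolean marker
	fmt.Println(string(out))
}
```
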
diff --git a/testhelper/general_test.go b/testhelper/general_test.go
index 6dd6b47f..b2b8ea3a 100644
--- a/testhelper/general_test.go
+++ b/testhelper/general_test.go
@@ -2,13 +2,14 @@ package testhelper
 
 import (
 	"errors"
+	"os"
+	"sync"
+	"testing"
+
 	"github.com/IBM/go-sdk-core/v5/core"
 	projects "github.com/IBM/project-go-sdk/projectv1"
 	schematics "github.com/IBM/schematics-go-sdk/schematicsv1"
 	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/cloudinfo"
-	"os"
-	"sync"
-	"testing"
 
 	"github.com/IBM/platform-services-go-sdk/catalogmanagementv1"
 	"github.com/stretchr/testify/assert"
@@ -191,11 +192,11 @@ func (mock *cloudInfoServiceMock) GetClusterIngressStatus(string) (string, error
 	return "", nil
 }
 
-func (mock *cloudInfoServiceMock) GetSchematicsJobLogs(string) (*schematics.JobLog, *core.DetailedResponse, error) {
+func (mock *cloudInfoServiceMock) GetSchematicsJobLogs(string, string) (*schematics.JobLog, *core.DetailedResponse, error) {
 	return nil, nil, nil
 
 }
-func (mock *cloudInfoServiceMock) GetSchematicsJobLogsText(string) (string, error) {
+func (mock *cloudInfoServiceMock) GetSchematicsJobLogsText(string, string) (string, error) {
 	return "", nil
 
 }
@@ -204,7 +205,7 @@ func (mock *cloudInfoServiceMock) ArePipelineActionsRunning(stackConfig *cloudin
 	return false, nil
 }
 
-func (mock *cloudInfoServiceMock) GetSchematicsJobLogsForMember(member *projects.ProjectConfig, memberName string) (string, string) {
+func (mock *cloudInfoServiceMock) GetSchematicsJobLogsForMember(member *projects.ProjectConfig, memberName string, projectRegion string) (string, string) {
 	return "", ""
 }
 
diff --git a/testhelper/terraform.go b/testhelper/terraform.go
index 9d93094b..01560017 100644
--- a/testhelper/terraform.go
+++ b/testhelper/terraform.go
@@ -1,10 +1,9 @@
 package testhelper
 
 import (
+	"encoding/json"
 	"errors"
 	"fmt"
-	"github.com/gruntwork-io/terratest/modules/files"
-	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/common"
 	"log"
 	"os"
 	"os/exec"
@@ -12,6 +11,13 @@ import (
 	"reflect"
 	"regexp"
 	"strings"
+
+	"github.com/gruntwork-io/terratest/modules/files"
+	"github.com/gruntwork-io/terratest/modules/logger"
+	"github.com/gruntwork-io/terratest/modules/terraform"
+	tfjson "github.com/hashicorp/terraform-json"
+	"github.com/stretchr/testify/assert"
+	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/common"
 )
 
 // RemoveFromStateFile Attempts to remove resource from state file
@@ -63,9 +69,9 @@ func ValidateTerraformOutputs(outputs map[string]interface{}, expectedKeys ...st
 		if !ok {
 			missingKeys = append(missingKeys, key)
 			if err != nil {
-				err = fmt.Errorf("%wOutput: %s'%s'%s was not found\n", err, blueBold, key, reset)
+				err = fmt.Errorf("%w\noutput: %s'%s'%s was not found", err, blueBold, key, reset)
 			} else {
-				err = fmt.Errorf("Output: %s'%s'%s was not found\n", blueBold, key, reset)
+				err = fmt.Errorf("output: %s'%s'%s was not found", blueBold, key, reset)
 			}
 		} else {
 			if value == nil || (reflect.TypeOf(value).String() == "string" && len(strings.Trim(value.(string), " ")) == 0) {
@@ -77,9 +83,9 @@ func ValidateTerraformOutputs(outputs map[string]interface{}, expectedKeys ...st
 					expected = "blank string"
 				}
 				if err != nil {
-					err = fmt.Errorf("%wOutput: %s'%s'%s was not expected to be %s\n", err, blueBold, key, reset, expected)
+					err = fmt.Errorf("%w\noutput: %s'%s'%s was not expected to be %s", err, blueBold, key, reset, expected)
 				} else {
-					err = fmt.Errorf("Output: %s'%s'%s was not expected to be %s\n", blueBold, key, reset, expected)
+					err = fmt.Errorf("output: %s'%s'%s was not expected to be %s", blueBold, key, reset, expected)
 				}
 			}
 		}
@@ -121,3 +127,182 @@ func CleanTerraformDir(directory string) {
 		}
 	}
 }
+
+// CheckConsistency fails the test if any destroys are detected and the resource is not exempt.
+// If any addresses are provided in IgnoreUpdates.List, then fail on updates too unless the resource is exempt.
+// Returns true if any non-exempt consistency changes were identified.
+func CheckConsistency(plan *terraform.PlanStruct, testOptions CheckConsistencyOptionsI) bool {
+	validChange := false
+
+	// extract consistency options from base set of options (schematic or terratest)
+	options := testOptions.GetCheckConsistencyOptions()
+
+	for _, resource := range plan.ResourceChangesMap {
+		// get JSON string of full changes for the logs
+		changesBytes, changesErr := json.MarshalIndent(resource.Change, "", "  ")
+		// if it errors in the marshall step, just put a placeholder and move on, not important
+		changesJson := "--UNAVAILABLE--"
+		if changesErr == nil {
+			changesJson = string(changesBytes)
+		}
+
+		var resourceDetails string
+
+		// Treat all keys in the BeforeSensitive and AfterSensitive maps as sensitive
+		// Assuming BeforeSensitive and AfterSensitive are of type interface{}
+		beforeSensitive, beforeSensitiveOK := resource.Change.BeforeSensitive.(map[string]interface{})
+		afterSensitive, afterSensitiveOK := resource.Change.AfterSensitive.(map[string]interface{})
+
+		// Create the mergedSensitive map
+		mergedSensitive := make(map[string]interface{})
+
+		// Check if BeforeSensitive is of the expected type
+		if beforeSensitiveOK {
+			// Copy the keys and values from BeforeSensitive to the mergedSensitive map.
+			for key, value := range beforeSensitive {
+				// only treat the key as sensitive when its marker is a boolean or a
+				// non-empty map (see isSanitizationSensitiveValue); Terraform leaves the
+				// map empty when a map attribute has no sensitive fields
+				if isSanitizationSensitiveValue(value) {
+					mergedSensitive[key] = value
+				}
+			}
+		}
+
+		// Check if AfterSensitive is of the expected type
+		if afterSensitiveOK {
+			// Copy the keys and values from AfterSensitive to the mergedSensitive map.
+			for key, value := range afterSensitive {
+				// same rule as above: booleans and non-empty maps count as sensitive
+				if isSanitizationSensitiveValue(value) {
+					mergedSensitive[key] = value
+				}
+			}
+		}
+
+		// Perform sanitization
+		sanitizedChangesJson, err := sanitizeResourceChanges(resource.Change, mergedSensitive)
+		if err != nil {
+			sanitizedChangesJson = "Error sanitizing sensitive data"
+			logger.Log(options.Testing, sanitizedChangesJson)
+		}
+		formatChangesJson, err := common.FormatJsonStringPretty(sanitizedChangesJson)
+
+		var formatChangesJsonString string
+		if err != nil {
+			logger.Log(options.Testing, "Error formatting JSON, use unformatted")
+			formatChangesJsonString = sanitizedChangesJson
+		} else {
+			formatChangesJsonString = string(formatChangesJson)
+		}
+
+		diff, diffErr := common.GetBeforeAfterDiff(changesJson)
+
+		if diffErr != nil {
+			diff = fmt.Sprintf("Error getting diff: %s", diffErr)
+		} else {
+			// Split the changesJson into "Before" and "After" parts
+			beforeAfter := strings.Split(diff, "After: ")
+
+			// Perform sanitization on "After" part
+			var after string
+			if len(beforeAfter) > 1 {
+				after, err = common.SanitizeSensitiveData(beforeAfter[1], mergedSensitive)
+				handleSanitizationError(err, "after diff", options)
+			} else {
+				after = "Could not parse after from diff" // don't print in case the diff contains sensitive values
+			}
+
+			// Perform sanitization on "Before" part
+			var before string
+			if len(beforeAfter) > 0 {
+				before, err = common.SanitizeSensitiveData(strings.TrimPrefix(beforeAfter[0], "Before: "), mergedSensitive)
+				handleSanitizationError(err, "before diff", options)
+			} else {
+				before = "Could not parse before from diff" // don't print in case the diff contains sensitive values
+			}
+
+			// Reassemble the sanitized diff string
+			diff = "  Before: \n\t" + before + "\n  After: \n\t" + after
+		}
+		resourceDetails = fmt.Sprintf("\nName: %s\nAddress: %s\nActions: %s\nDIFF:\n%s\n\nChange Detail:\n%s", resource.Name, resource.Address, resource.Change.Actions, diff, formatChangesJsonString)
+
+		var errorMessage string
+		if !options.IgnoreDestroys.IsExemptedResource(resource.Address) {
+			errorMessage = fmt.Sprintf("Resource(s) identified to be destroyed %s", resourceDetails)
+			assert.False(options.Testing, resource.Change.Actions.Delete(), errorMessage)
+			assert.False(options.Testing, resource.Change.Actions.DestroyBeforeCreate(), errorMessage)
+			assert.False(options.Testing, resource.Change.Actions.CreateBeforeDestroy(), errorMessage)
+			validChange = true
+		}
+		if !options.IgnoreUpdates.IsExemptedResource(resource.Address) {
+			errorMessage = fmt.Sprintf("Resource(s) identified to be updated %s", resourceDetails)
+			assert.False(options.Testing, resource.Change.Actions.Update(), errorMessage)
+			validChange = true
+		}
+		// We only want to check pure Adds (creates without destroy) if the consistency test is
+		// NOT the result of an Upgrade, as some adds are expected when doing the Upgrade test
+		// (such as new resources added as part of the pull request)
+		if !options.IsUpgradeTest {
+			if !options.IgnoreAdds.IsExemptedResource(resource.Address) {
+				errorMessage = fmt.Sprintf("Resource(s) identified to be created %s", resourceDetails)
+				assert.False(options.Testing, resource.Change.Actions.Create(), errorMessage)
+				validChange = true
+			}
+		}
+	}
+
+	return validChange
+}
+
+// sanitizeResourceChanges sanitizes the sensitive data in a Terraform JSON Change and returns the sanitized JSON.
+func sanitizeResourceChanges(change *tfjson.Change, mergedSensitive map[string]interface{}) (string, error) {
+	// Marshal the Change to JSON bytes
+	changesBytes, err := json.MarshalIndent(change, "", "  ")
+	if err != nil {
+		return "", err
+	}
+	changesJson := string(changesBytes)
+
+	// Perform sanitization of sensitive data
+	changesJson, err = common.SanitizeSensitiveData(changesJson, mergedSensitive)
+	return changesJson, err
+}
+
+// handleSanitizationError logs an error message if a sanitization error occurs.
+func handleSanitizationError(err error, location string, options *CheckConsistencyOptions) {
+	if err != nil {
+		errorMessage := fmt.Sprintf("Error sanitizing sensitive data in %s", location)
+		logger.Log(options.Testing, errorMessage)
+	}
+}
+
+// isSanitizationSensitiveValue inspects the value data type of an attribute identified as sensitive in a TF plan.
+// Only boolean values, or maps with one or more fields, are considered sensitive.
+func isSanitizationSensitiveValue(value interface{}) bool {
+	// booleans are the normal sensitivity markers; for maps, Terraform lists the map
+	// itself even when it holds no sensitive fields, so only non-empty maps count
+	switch v := value.(type) {
+	case bool:
+		return true
+	case map[string]interface{}:
+		// at least one sensitive field must be present for the map to count
+		return len(v) > 0
+	default:
+		// take the safe route and assume anything else is sensitive
+		return true
+	}
+}
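
Reviewer note: a hedged usage sketch for the now-exported `CheckConsistency`. The helper name `assertPlanConsistent` and package name are hypothetical; it assumes the plan was already produced (for example via terratest's plan-and-show helpers) and that the caller re-plans itself for readable output, as `RunTestConsistency`/`RunTestUpgrade` now do in this patch.

```go
package example

import (
	"testing"

	"github.com/gruntwork-io/terratest/modules/terraform"
	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/testhelper"
)

// assertPlanConsistent is a hypothetical helper: plan is a parsed *terraform.PlanStruct
// and options is a *testhelper.TestOptions, which satisfies CheckConsistencyOptionsI.
func assertPlanConsistent(t *testing.T, options *testhelper.TestOptions, plan *terraform.PlanStruct) {
	// CheckConsistency returns true when any non-exempt create/update/destroy was seen
	if testhelper.CheckConsistency(plan, options) {
		// the exported CheckConsistency no longer re-runs plan itself, so the caller
		// does it here to get the human-readable plan into the test log
		terraform.Plan(t, options.TerraformOptions)
	}
}
```
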
diff --git a/testhelper/terraform_test.go b/testhelper/terraform_test.go
index 9eda2efd..29b8ad3c 100644
--- a/testhelper/terraform_test.go
+++ b/testhelper/terraform_test.go
@@ -1,8 +1,9 @@
 package testhelper
 
 import (
-	"github.com/stretchr/testify/assert"
 	"testing"
+
+	"github.com/stretchr/testify/assert"
 )
 
 func TestGetTerraformOutputs(t *testing.T) {
@@ -40,7 +41,7 @@ func TestGetTerraformOutputs(t *testing.T) {
 		missingKeys, err := ValidateTerraformOutputs(outputs, expectedKeys...)
 		assert.Contains(t, missingKeys, "test4")
 		assert.Error(t, err)
-		assert.Equal(t, "Output: \x1b[1;34m'test4'\x1b[0m was not expected to be nil\n", err.Error())
+		assert.Equal(t, "output: \x1b[1;34m'test4'\x1b[0m was not expected to be nil", err.Error())
 	})
 
 	t.Run("Not all outputs exist", func(t *testing.T) {
@@ -57,7 +58,7 @@ func TestGetTerraformOutputs(t *testing.T) {
 		missingKeys, err := ValidateTerraformOutputs(outputs, expectedKeys...)
 		assert.Contains(t, missingKeys, "test4")
 		assert.Error(t, err)
-		assert.Equal(t, "Output: \x1b[1;34m'test4'\x1b[0m was not found\n", err.Error())
+		assert.Equal(t, "output: \x1b[1;34m'test4'\x1b[0m was not found", err.Error())
 	})
 
 	t.Run("Mixed errors", func(t *testing.T) {
@@ -76,6 +77,6 @@ func TestGetTerraformOutputs(t *testing.T) {
 		missingKeys, err := ValidateTerraformOutputs(outputs, expectedKeys...)
 		assert.Contains(t, missingKeys, "test4")
 		assert.Error(t, err)
-		assert.Equal(t, "Output: \u001B[1;34m'test3'\u001B[0m was not expected to be blank string\nOutput: \x1b[1;34m'test4'\x1b[0m was not found\nOutput: \x1b[1;34m'test5'\x1b[0m was not found\nOutput: \x1b[1;34m'test6'\x1b[0m was not expected to be nil\nOutput: \u001B[1;34m'test7'\u001B[0m was not expected to be blank string\n", err.Error())
+		assert.Equal(t, "output: \u001B[1;34m'test3'\u001B[0m was not expected to be blank string\noutput: \x1b[1;34m'test4'\x1b[0m was not found\noutput: \x1b[1;34m'test5'\x1b[0m was not found\noutput: \x1b[1;34m'test6'\x1b[0m was not expected to be nil\noutput: \u001B[1;34m'test7'\u001B[0m was not expected to be blank string", err.Error())
 	})
 }
diff --git a/testhelper/test_options.go b/testhelper/test_options.go
index 67727a54..f0e808d4 100644
--- a/testhelper/test_options.go
+++ b/testhelper/test_options.go
@@ -173,6 +173,43 @@ type TestOptions struct {
 	PostDestroyHook func(options *TestOptions) error
 }
 
+// CheckConsistencyOptions holds the options used by CheckConsistency to evaluate a Terraform plan
+type CheckConsistencyOptions struct {
+	// REQUIRED: a pointer to an initialized testing object.
+	// Typically, you would assign the test object used in the unit test.
+	Testing *testing.T
+
+	// For Consistency Checks: Specify terraform resource names to ignore for consistency checks.
+	// You can ignore specific resources in both idempotent and upgrade consistency checks by adding their names to these
+	// lists. There are separate lists for adds, updates, and destroys.
+	//
+	// This can be useful if you have resources like `null_resource` that are marked with a lifecycle that causes a refresh on every run.
+	// Normally this would fail a consistency check but can be ignored by adding to one of these lists.
+	//
+	// Name format is terraform style, for example: `module.some_module.null_resource.foo`
+	IgnoreAdds     Exemptions
+	IgnoreDestroys Exemptions
+	IgnoreUpdates  Exemptions
+
+	IsUpgradeTest bool // Identifies if current test is an UPGRADE test, used for special processing
+}
+
+// CheckConsistencyOptionsI is an interface that a test options struct can implement to return the appropriate
+// CheckConsistencyOptions object populated with the correct values
+type CheckConsistencyOptionsI interface {
+	GetCheckConsistencyOptions() *CheckConsistencyOptions
+}
+
+// GetCheckConsistencyOptions returns the consistency check options for TestOptions, satisfying CheckConsistencyOptionsI
+func (options *TestOptions) GetCheckConsistencyOptions() *CheckConsistencyOptions {
+	return &CheckConsistencyOptions{
+		Testing:        options.Testing,
+		IgnoreAdds:     options.IgnoreAdds,
+		IgnoreDestroys: options.IgnoreDestroys,
+		IgnoreUpdates:  options.IgnoreUpdates,
+		IsUpgradeTest:  options.IsUpgradeTest,
+	}
+}
+
 // Default constructor for TestOptions struct. This constructor takes in an existing TestOptions object with minimal values set, and returns
 // a new object that has amended or new values set.
 //
diff --git a/testhelper/tests.go b/testhelper/tests.go
index f3c3cd57..116fa63e 100644
--- a/testhelper/tests.go
+++ b/testhelper/tests.go
@@ -1,7 +1,6 @@
 package testhelper
 
 import (
-	"encoding/json"
 	"fmt"
 	"os"
 	"os/exec"
@@ -12,7 +11,6 @@ import (
 
 	"github.com/IBM/platform-services-go-sdk/resourcecontrollerv2"
 	"github.com/gruntwork-io/terratest/modules/random"
-	tfjson "github.com/hashicorp/terraform-json"
 
 	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/cloudinfo"
 
@@ -64,150 +62,6 @@ func (options *TestOptions) skipUpgradeTest(source_repo string, source_branch st
 	return doNotRunUpgradeTest
 }
 
-// sanitizeResourceChanges sanitizes the sensitive data in a Terraform JSON Change and returns the sanitized JSON.
-func sanitizeResourceChanges(change *tfjson.Change, mergedSensitive map[string]interface{}) (string, error) {
-	// Marshal the Change to JSON bytes
-	changesBytes, err := json.MarshalIndent(change, "", "  ")
-	if err != nil {
-		return "", err
-	}
-	changesJson := string(changesBytes)
-
-	// Perform sanitization of sensitive data
-	changesJson, err = common.SanitizeSensitiveData(changesJson, mergedSensitive)
-	return changesJson, err
-}
-
-// handleSanitizationError logs an error message if a sanitization error occurs.
-func handleSanitizationError(err error, location string, options *TestOptions) {
-	if err != nil {
-		errorMessage := fmt.Sprintf("Error sanitizing sensitive data in %s", location)
-		logger.Log(options.Testing, errorMessage)
-	}
-}
-
-// checkConsistency Fails the test if any destroys are detected and the resource is not exempt.
-// If any addresses are provided in IgnoreUpdates.List then fail on updates too unless the resource is exempt
-func (options *TestOptions) CheckConsistency(plan *terraform.PlanStruct) {
-	options.checkConsistency(plan)
-}
-
-// checkConsistency check consistency
-func (options *TestOptions) checkConsistency(plan *terraform.PlanStruct) {
-	validChange := false
-
-	for _, resource := range plan.ResourceChangesMap {
-		// get JSON string of full changes for the logs
-		changesBytes, changesErr := json.MarshalIndent(resource.Change, "", "  ")
-		// if it errors in the marshall step, just put a placeholder and move on, not important
-		changesJson := "--UNAVAILABLE--"
-		if changesErr == nil {
-			changesJson = string(changesBytes)
-		}
-
-		var resourceDetails string
-
-		// Treat all keys in the BeforeSensitive and AfterSensitive maps as sensitive
-		// Assuming BeforeSensitive and AfterSensitive are of type interface{}
-		beforeSensitive, beforeSensitiveOK := resource.Change.BeforeSensitive.(map[string]interface{})
-		afterSensitive, afterSensitiveOK := resource.Change.AfterSensitive.(map[string]interface{})
-
-		// Create the mergedSensitive map
-		mergedSensitive := make(map[string]interface{})
-
-		// Check if BeforeSensitive is of the expected type
-		if beforeSensitiveOK {
-			// Copy the keys and values from BeforeSensitive to the mergedSensitive map.
-			for key, value := range beforeSensitive {
-				mergedSensitive[key] = value
-			}
-		}
-
-		// Check if AfterSensitive is of the expected type
-		if afterSensitiveOK {
-			// Copy the keys and values from AfterSensitive to the mergedSensitive map.
-			for key, value := range afterSensitive {
-				mergedSensitive[key] = value
-			}
-		}
-
-		// Perform sanitization
-		sanitizedChangesJson, err := sanitizeResourceChanges(resource.Change, mergedSensitive)
-		if err != nil {
-			sanitizedChangesJson = "Error sanitizing sensitive data"
-			logger.Log(options.Testing, sanitizedChangesJson)
-		}
-		formatChangesJson, err := common.FormatJsonStringPretty(sanitizedChangesJson)
-
-		var formatChangesJsonString string
-		if err != nil {
-			logger.Log(options.Testing, "Error formatting JSON, use unformatted")
-			formatChangesJsonString = sanitizedChangesJson
-		} else {
-			formatChangesJsonString = string(formatChangesJson)
-		}
-
-		diff, diffErr := common.GetBeforeAfterDiff(changesJson)
-
-		if diffErr != nil {
-			diff = fmt.Sprintf("Error getting diff: %s", diffErr)
-		} else {
-			// Split the changesJson into "Before" and "After" parts
-			beforeAfter := strings.Split(diff, "After: ")
-
-			// Perform sanitization on "After" part
-			var after string
-			if len(beforeAfter) > 1 {
-				after, err = common.SanitizeSensitiveData(beforeAfter[1], mergedSensitive)
-				handleSanitizationError(err, "after diff", options)
-			} else {
-				after = fmt.Sprintf("Could not parse after from diff") // dont print incase diff contains sensitive values
-			}
-
-			// Perform sanitization on "Before" part
-			var before string
-			if len(beforeAfter) > 0 {
-				before, err = common.SanitizeSensitiveData(strings.TrimPrefix(beforeAfter[0], "Before: "), mergedSensitive)
-				handleSanitizationError(err, "before diff", options)
-			} else {
-				before = fmt.Sprintf("Could not parse before from diff") // dont print incase diff contains sensitive values
-			}
-
-			// Reassemble the sanitized diff string
-			diff = "  Before: \n\t" + before + "\n  After: \n\t" + after
-		}
-		resourceDetails = fmt.Sprintf("\nName: %s\nAddress: %s\nActions: %s\nDIFF:\n%s\n\nChange Detail:\n%s", resource.Name, resource.Address, resource.Change.Actions, diff, formatChangesJsonString)
-
-		var errorMessage string
-		if !options.IgnoreDestroys.IsExemptedResource(resource.Address) {
-			errorMessage = fmt.Sprintf("Resource(s) identified to be destroyed %s", resourceDetails)
-			assert.False(options.Testing, resource.Change.Actions.Delete(), errorMessage)
-			assert.False(options.Testing, resource.Change.Actions.DestroyBeforeCreate(), errorMessage)
-			assert.False(options.Testing, resource.Change.Actions.CreateBeforeDestroy(), errorMessage)
-			validChange = true
-		}
-		if !options.IgnoreUpdates.IsExemptedResource(resource.Address) {
-			errorMessage = fmt.Sprintf("Resource(s) identified to be updated %s", resourceDetails)
-			assert.False(options.Testing, resource.Change.Actions.Update(), errorMessage)
-			validChange = true
-		}
-		// We only want to check pure Adds (creates without destroy) if the consistency test is
-		// NOT the result of an Upgrade, as some adds are expected when doing the Upgrade test
-		// (such as new resources were added as part of the pull request)
-		if !options.IsUpgradeTest {
-			if !options.IgnoreAdds.IsExemptedResource(resource.Address) {
-				errorMessage = fmt.Sprintf("Resource(s) identified to be created %s", resourceDetails)
-				assert.False(options.Testing, resource.Change.Actions.Create(), errorMessage)
-				validChange = true
-			}
-		}
-	}
-	// Run plan again to output the nice human-readable plan if there are valid changes
-	if validChange {
-		terraform.Plan(options.Testing, options.TerraformOptions)
-	}
-}
-
 // Function to setup testing environment.
 //
 // Summary of settings:
@@ -376,7 +230,7 @@ func (options *TestOptions) testTearDown() {
 			}
 			logger.Log(options.Testing, "START: Destroy")
 			destroyOutput, destroyError := terraform.DestroyE(options.Testing, options.TerraformOptions)
-			if assert.NoError(options.Testing, destroyError) == false {
+			if !assert.NoError(options.Testing, destroyError) {
 				logger.Log(options.Testing, destroyError)
 				// On destroy resource group failure, list remaining resources
 				if common.StringContainsIgnoreCase(destroyError.Error(), "Error Deleting resource group") {
@@ -466,7 +320,7 @@ func (options *TestOptions) testTearDown() {
 
 // print_resources internal helper function that prints the resources in the resource group
 func print_resources(t *testing.T, resourceGroup string, resources []resourcecontrollerv2.ResourceInstance, err error) {
-	logger.Log(t, fmt.Sprintf("---------------------------"))
+	logger.Log(t, "---------------------------")
 	if err != nil {
 		logger.Log(t, fmt.Sprintf("Error listing resources in Resource Group %s, %s\n"+
 			"Is this Resource Group already deleted?", resourceGroup, err))
@@ -476,7 +330,7 @@ func print_resources(t *testing.T, resourceGroup string, resources []resourcecon
 		logger.Log(t, fmt.Sprintf("Resources in Resource Group %s:", resourceGroup))
 		cloudinfo.PrintResources(resources)
 	}
-	logger.Log(t, fmt.Sprintf("---------------------------"))
+	logger.Log(t, "---------------------------")
 }
 
 // RunTestUpgrade runs the upgrade test to ensure that the Terraform configurations being tested
@@ -763,7 +617,11 @@ func (options *TestOptions) RunTestUpgrade() (*terraform.PlanStruct, error) {
 		}
 
 		logger.Log(options.Testing, "Parsing plan output to determine if any resources identified for destroy (PR branch)...")
-		options.checkConsistency(result)
+		hasConsistencyChanges := CheckConsistency(result, options)
+
+		if hasConsistencyChanges {
+			terraform.Plan(options.Testing, options.TerraformOptions)
+		}
 
 		// Check if optional upgrade support on PR Branch is needed
 		if options.CheckApplyResultForUpgrade && !options.Testing.Failed() {
@@ -811,7 +669,12 @@ func (options *TestOptions) RunTestConsistency() (*terraform.PlanStruct, error)
 		options.testTearDown()
 		return result, err
 	}
-	options.checkConsistency(result)
+	hasConsistencyChanges := CheckConsistency(result, options)
+
+	if hasConsistencyChanges {
+		terraform.Plan(options.Testing, options.TerraformOptions)
+	}
+
 	logger.Log(options.Testing, "FINISHED: Init / Apply / Consistency Check")
 
 	options.testTearDown()
diff --git a/testhelper/tf_exempt_test.go b/testhelper/tf_exempt_test.go
index 5cbd7959..ff683449 100644
--- a/testhelper/tf_exempt_test.go
+++ b/testhelper/tf_exempt_test.go
@@ -1,8 +1,9 @@
 package testhelper
 
 import (
-	"github.com/stretchr/testify/assert"
 	"testing"
+
+	"github.com/stretchr/testify/assert"
 )
 
 var ex = Exemptions{List: []string{"i_am_exempt", "i.am.also.exempt"}}
diff --git a/testprojects/tests.go b/testprojects/tests.go
index 261c03b4..95cbb625 100644
--- a/testprojects/tests.go
+++ b/testprojects/tests.go
@@ -236,7 +236,7 @@ func (options *TestProjectsOptions) TriggerDeployAndWait() (errorList []error) {
 			case project.ProjectConfig_State_ValidatingFailed, project.ProjectConfig_State_DeployingFailed:
 				deployableState = false
 				failed = true
-				logMessage, terraLogs := options.CloudInfoService.GetSchematicsJobLogsForMember(member, memberName)
+				logMessage, terraLogs := options.CloudInfoService.GetSchematicsJobLogsForMember(member, memberName, options.currentProjectConfig.Location)
 				options.Logger.ShortError(terraLogs)
 				errorList = append(errorList, fmt.Errorf(logMessage))
 			case project.ProjectConfig_State_Draft:
@@ -553,7 +553,7 @@ func (options *TestProjectsOptions) TriggerUnDeployAndWait() (errorList []error)
 						memberStates = append(memberStates, fmt.Sprintf("%s%s current state: %s", memberLabel, memberName, Statuses[*member.State]))
 						undeployableState = false
 						failed = true
-						logMessage, terraLogs := options.CloudInfoService.GetSchematicsJobLogsForMember(member, memberName)
+						logMessage, terraLogs := options.CloudInfoService.GetSchematicsJobLogsForMember(member, memberName, options.currentProjectConfig.Location)
 						options.Logger.ShortError(terraLogs)
 						errorList = append(errorList, fmt.Errorf("(%s) failed Undeployment\n%s", memberName, logMessage))
 					} else if cloudinfo.ProjectsMemberIsUndeployed(member) {
diff --git a/testschematic/mock_test.go b/testschematic/mock_test.go
index 115ede92..86e69799 100644
--- a/testschematic/mock_test.go
+++ b/testschematic/mock_test.go
@@ -2,14 +2,18 @@ package testschematic
 
 import (
 	"net/http"
+	"sync"
 	"testing"
 	"time"
 
 	"github.com/IBM/go-sdk-core/v5/core"
+	"github.com/IBM/platform-services-go-sdk/catalogmanagementv1"
+	projects "github.com/IBM/project-go-sdk/projectv1"
 	schematics "github.com/IBM/schematics-go-sdk/schematicsv1"
 	"github.com/go-openapi/strfmt"
 	"github.com/go-openapi/strfmt/conv"
 	"github.com/stretchr/testify/mock"
+	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/cloudinfo"
 )
 
 const mockWorkspaceID = "ws12345"
@@ -254,3 +258,166 @@ func (mock *iamAuthenticatorMock) RequestToken() (*core.IamTokenServerResponse,
 
 	return retval, nil
 }
+
+/**** START MOCK CloudInfoService ****/
+type cloudInfoServiceMock struct {
+	mock.Mock
+	cloudinfo.CloudInfoServiceI
+	lock sync.Mutex
+}
+
+func (mock *cloudInfoServiceMock) CreateStackDefinitionWrapper(stackDefOptions *projects.CreateStackDefinitionOptions, members []projects.StackConfigMember) (result *projects.StackDefinition, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) LoadRegionPrefsFromFile(prefsFile string) error {
+	return nil
+}
+
+func (mock *cloudInfoServiceMock) GetLeastVpcTestRegion() (string, error) {
+	return "us-south", nil
+}
+
+func (mock *cloudInfoServiceMock) GetLeastVpcTestRegionWithoutActivityTracker() (string, error) {
+	return "us-east", nil
+}
+
+func (mock *cloudInfoServiceMock) GetLeastPowerConnectionZone() (string, error) {
+	return "us-south", nil
+}
+
+func (mock *cloudInfoServiceMock) HasRegionData() bool {
+	return false
+}
+
+func (mock *cloudInfoServiceMock) RemoveRegionForTest(regionID string) {
+	// nothing to really do here
+}
+
+func (mock *cloudInfoServiceMock) GetThreadLock() *sync.Mutex {
+	return &mock.lock
+}
+
+func (mock *cloudInfoServiceMock) GetCatalogVersionByLocator(string) (*catalogmanagementv1.Version, error) {
+	return nil, nil
+}
+func (mock *cloudInfoServiceMock) CreateProjectFromConfig(*cloudinfo.ProjectsConfig) (*projects.Project, *core.DetailedResponse, error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) GetProject(string) (*projects.Project, *core.DetailedResponse, error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) GetProjectConfigs(string) ([]projects.ProjectConfigSummary, error) {
+	return nil, nil
+}
+
+func (mock *cloudInfoServiceMock) GetConfig(*cloudinfo.ConfigDetails) (result *projects.ProjectConfig, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) DeleteProject(string) (*projects.ProjectDeleteResponse, *core.DetailedResponse, error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) CreateConfig(*cloudinfo.ConfigDetails) (result *projects.ProjectConfig, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) DeployConfig(*cloudinfo.ConfigDetails) (result *projects.ProjectConfigVersion, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) CreateDaConfig(*cloudinfo.ConfigDetails) (result *projects.ProjectConfig, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) CreateConfigFromCatalogJson(*cloudinfo.ConfigDetails, string) (result *projects.ProjectConfig, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) UpdateConfig(*cloudinfo.ConfigDetails, projects.ProjectConfigDefinitionPatchIntf) (result *projects.ProjectConfig, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) ValidateProjectConfig(*cloudinfo.ConfigDetails) (result *projects.ProjectConfigVersion, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) IsConfigDeployed(*cloudinfo.ConfigDetails) (projectConfig *projects.ProjectConfigVersion, isDeployed bool) {
+	return nil, false
+}
+
+func (mock *cloudInfoServiceMock) UndeployConfig(*cloudinfo.ConfigDetails) (result *projects.ProjectConfigVersion, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) IsUndeploying(*cloudinfo.ConfigDetails) (projectConfig *projects.ProjectConfigVersion, isUndeploying bool) {
+	return nil, false
+}
+
+func (mock *cloudInfoServiceMock) CreateStackFromConfigFile(*cloudinfo.ConfigDetails, string, string) (result *projects.StackDefinition, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) GetProjectConfigVersion(*cloudinfo.ConfigDetails, int64) (result *projects.ProjectConfigVersion, response *core.DetailedResponse, err error) {
+	return nil, nil, nil
+}
+
+func (mock *cloudInfoServiceMock) GetStackMembers(stackConfig *cloudinfo.ConfigDetails) ([]*projects.ProjectConfig, error) {
+	return nil, nil
+}
+
+func (mock *cloudInfoServiceMock) SyncConfig(projectID string, configID string) (response *core.DetailedResponse, err error) {
+	return nil, nil
+}
+
+func (mock *cloudInfoServiceMock) LookupMemberNameByID(stackDetails *projects.ProjectConfig, memberID string) (string, error) {
+	return "", nil
+}
+
+func (mock *cloudInfoServiceMock) GetClusterIngressStatus(string) (string, error) {
+	return "", nil
+}
+
+func (mock *cloudInfoServiceMock) GetSchematicsJobLogs(string, string) (*schematics.JobLog, *core.DetailedResponse, error) {
+	return nil, nil, nil
+
+}
+func (mock *cloudInfoServiceMock) GetSchematicsJobLogsText(string, string) (string, error) {
+	return "", nil
+}
+
+func (mock *cloudInfoServiceMock) ArePipelineActionsRunning(stackConfig *cloudinfo.ConfigDetails) (bool, error) {
+	return false, nil
+}
+
+func (mock *cloudInfoServiceMock) GetSchematicsJobLogsForMember(member *projects.ProjectConfig, memberName string, projectRegion string) (string, string) {
+	return "", ""
+}
+
+// special mock for CreateStackDefinition
+// we do not have enough information when mocking projectv1.CreateStackDefinition to return a valid response,
+// so we create a wrapper that takes in the missing list of members, which the mock can use
+// to return a valid response
+
+func (mock *cloudInfoServiceMock) CreateStackDefinition(stackDefOptions *projects.CreateStackDefinitionOptions, members []projects.StackConfigMember) (result *projects.StackDefinition, response *core.DetailedResponse, err error) {
+	args := mock.Called(stackDefOptions, members)
+	return args.Get(0).(*projects.StackDefinition), args.Get(1).(*core.DetailedResponse), args.Error(2)
+}
+
+func (mock *cloudInfoServiceMock) GetSchematicsJobFileData(jobID string, fileType string, location string) (*schematics.JobFileData, error) {
+	dummyFile := &schematics.JobFileData{
+		JobID:       core.StringPtr(jobID),
+		FileContent: core.StringPtr("testing 1 2 3"),
+	}
+	return dummyFile, nil
+}
+
+func (mock *cloudInfoServiceMock) GetSchematicsJobPlanJson(jobID string, location string) (string, error) {
+	// needed a valid json for marshalling, this is the terraform-ibm-resource-group plan with no changes
+	return "{\"format_version\":\"1.2\",\"terraform_version\":\"1.9.2\",\"variables\":{\"ibmcloud_api_key\":{\"value\":\"dummy-key\"},\"resource_group_name\":{\"value\":\"geretain-test-resources\"}},\"planned_values\":{\"outputs\":{\"resource_group_id\":{\"sensitive\":false,\"type\":\"string\",\"value\":\"292170bc79c94f5e9019e46fb48f245a\"},\"resource_group_name\":{\"sensitive\":false,\"type\":\"string\",\"value\":\"geretain-test-resources\"}},\"root_module\":{}},\"output_changes\":{\"resource_group_id\":{\"actions\":[\"no-op\"],\"before\":\"292170bc79c94f5e9019e46fb48f245a\",\"after\":\"292170bc79c94f5e9019e46fb48f245a\",\"after_unknown\":false,\"before_sensitive\":false,\"after_sensitive\":false},\"resource_group_name\":{\"actions\":[\"no-op\"],\"before\":\"geretain-test-resources\",\"after\":\"geretain-test-resources\",\"after_unknown\":false,\"before_sensitive\":false,\"after_sensitive\":false}},\"prior_state\":{\"format_version\":\"1.0\",\"terraform_version\":\"1.9.2\",\"values\":{\"outputs\":{\"resource_group_id\":{\"sensitive\":false,\"value\":\"292170bc79c94f5e9019e46fb48f245a\",\"type\":\"string\"},\"resource_group_name\":{\"sensitive\":false,\"value\":\"geretain-test-resources\",\"type\":\"string\"}},\"root_module\":{\"child_modules\":[{\"resources\":[{\"address\":\"module.resource_group.data.ibm_resource_group.existing_resource_group[0]\",\"mode\":\"data\",\"type\":\"ibm_resource_group\",\"name\":\"existing_resource_group\",\"index\":0,\"provider_name\":\"registry.terraform.io/ibm-cloud/ibm\",\"schema_version\":0,\"values\":{\"account_id\":\"abac0df06b644a9cabc6e44f55b3880e\",\"created_at\":\"2022-08-04T16:52:02.227Z\",\"crn\":\"crn:v1:bluemix:public:resource-controller::a/abac0df06b644a9cabc6e44f55b3880e::resource-group:292170bc79c94f5e9019e46fb48f245a\",\"id\":\"292170bc79c94f5e9019e46fb48f245a\",\"is_default\":false,\"name\":\"geretain-test-resources\",\"payment_methods_url\":null,\"quota_id\":\"a3d7b8d01e261c24677937c29ab33f3c\",\"quota_url\":\"/v2/quota_definitions/a3d7b8d01e261c24677937c29ab33f3c\",\"resource_linkages\":[],\"state\":\"ACTIVE\",\"teams_url\":null,\"updated_at\":\"2022-08-04T16:52:02.227Z\"},\"sensitive_values\":{\"resource_linkages\":[]}}],\"address\":\"module.resource_group\"}]}}},\"configuration\":{\"provider_config\":{\"ibm\":{\"name\":\"ibm\",\"full_name\":\"registry.terraform.io/ibm-cloud/ibm\",\"version_constraint\":\"1.49.0\",\"expressions\":{\"ibmcloud_api_key\":{\"references\":[\"var.ibmcloud_api_key\"]}}}},\"root_module\":{\"outputs\":{\"resource_group_id\":{\"expression\":{\"references\":[\"module.resource_group.resource_group_id\",\"module.resource_group\"]},\"description\":\"Resource group ID\"},\"resource_group_name\":{\"expression\":{\"references\":[\"module.resource_group.resource_group_name\",\"module.resource_group\"]},\"description\":\"Resource group name\"}},\"module_calls\":{\"resource_group\":{\"source\":\"../../\",\"expressions\":{\"existing_resource_group_name\":{\"references\":[\"var.resource_group_name\"]}},\"module\":{\"outputs\":{\"resource_group_id\":{\"expression\":{\"references\":[\"var.existing_resource_group_name\",\"data.ibm_resource_group.existing_resource_group[0].id\",\"data.ibm_resource_group.existing_resource_group[0]\",\"data.ibm_resource_group.existing_resource_group\",\"ibm_resource_group.resource_group[0].id\",\"ibm_resource_group.resource_group[0]\",\"ibm_resource_group.resource_group\"]},\"description\":\"Resource group 
ID\"},\"resource_group_name\":{\"expression\":{\"references\":[\"var.existing_resource_group_name\",\"data.ibm_resource_group.existing_resource_group[0].name\",\"data.ibm_resource_group.existing_resource_group[0]\",\"data.ibm_resource_group.existing_resource_group\",\"ibm_resource_group.resource_group[0].name\",\"ibm_resource_group.resource_group[0]\",\"ibm_resource_group.resource_group\"]},\"description\":\"Resource group name\"}},\"resources\":[{\"address\":\"ibm_resource_group.resource_group\",\"mode\":\"managed\",\"type\":\"ibm_resource_group\",\"name\":\"resource_group\",\"provider_config_key\":\"ibm\",\"expressions\":{\"name\":{\"references\":[\"var.resource_group_name\"]},\"quota_id\":{\"constant_value\":null}},\"schema_version\":0,\"count_expression\":{\"references\":[\"var.existing_resource_group_name\"]}},{\"address\":\"data.ibm_resource_group.existing_resource_group\",\"mode\":\"data\",\"type\":\"ibm_resource_group\",\"name\":\"existing_resource_group\",\"provider_config_key\":\"ibm\",\"expressions\":{\"name\":{\"references\":[\"var.existing_resource_group_name\"]}},\"schema_version\":0,\"count_expression\":{\"references\":[\"var.existing_resource_group_name\"]}}],\"variables\":{\"existing_resource_group_name\":{\"default\":null,\"description\":\"Name of the existing resource group.  Required if not creating new resource group\"},\"resource_group_name\":{\"default\":null,\"description\":\"Name of the resource group to create. Required if not using existing resource group\"}}}}},\"variables\":{\"ibmcloud_api_key\":{\"description\":\"The IBM Cloud API Token\",\"sensitive\":true},\"resource_group_name\":{\"description\":\"Resource group name\"}}}},\"relevant_attributes\":[{\"resource\":\"module.resource_group.data.ibm_resource_group.existing_resource_group[0]\",\"attribute\":[\"name\"]},{\"resource\":\"module.resource_group.ibm_resource_group.resource_group[0]\",\"attribute\":[\"name\"]},{\"resource\":\"module.resource_group.data.ibm_resource_group.existing_resource_group[0]\",\"attribute\":[\"id\"]},{\"resource\":\"module.resource_group.ibm_resource_group.resource_group[0]\",\"attribute\":[\"id\"]}],\"timestamp\":\"2024-11-13T21:02:28Z\",\"applyable\":false,\"complete\":true,\"errored\":false}", nil
+}
+
+/**** END MOCK CloudInfoService ****/
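The mock above returns a canned plan for the terraform-ibm-resource-group module so that the consistency-check path can exercise real JSON parsing. A minimal sketch (not part of this change; the test name and file placement are hypothetical) of a unit test that confirms the fixture survives the same parsing the framework performs:

package testschematic

import (
	"testing"

	"github.com/gruntwork-io/terratest/modules/terraform"
	"github.com/stretchr/testify/assert"
)

// TestMockPlanJsonParses is a hypothetical test verifying the mocked plan JSON is valid,
// since the framework feeds it straight into terraform.ParsePlanJSON during consistency checks.
func TestMockPlanJsonParses(t *testing.T) {
	mock := &cloudInfoServiceMock{}

	// job ID and location are arbitrary for the mock
	planJson, err := mock.GetSchematicsJobPlanJson("mock-job-id", "us-south")
	assert.NoError(t, err)

	planStruct, parseErr := terraform.ParsePlanJSON(planJson)
	assert.NoError(t, parseErr)
	assert.NotNil(t, planStruct)
}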
diff --git a/testschematic/schematics.go b/testschematic/schematics.go
index 28b2dce5..05a88525 100644
--- a/testschematic/schematics.go
+++ b/testschematic/schematics.go
@@ -16,6 +16,7 @@ import (
 	schematics "github.com/IBM/schematics-go-sdk/schematicsv1"
 	"github.com/go-openapi/errors"
 	"github.com/gruntwork-io/terratest/modules/random"
+	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/cloudinfo"
 	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/common"
 )
 
@@ -33,7 +34,7 @@ const SchematicsJobStatusInProgress = "INPROGRESS"
 
 // Defaults for API retry mechanic
 const defaultApiRetryCount int = 5
-const defaultApiRetryWaitSeconds int = 10
+const defaultApiRetryWaitSeconds int = 30
 
 // golang does not support constant array/slice, this is our constant
 func getApiRetryStatusExceptions() []int {
@@ -64,14 +65,16 @@ type IamAuthenticatorSvcI interface {
 
 // main data struct for all schematic test methods
 type SchematicsTestService struct {
-	SchematicsApiSvc          SchematicsApiSvcI     // the main schematics service interface
-	ApiAuthenticator          IamAuthenticatorSvcI  // the authenticator used for schematics api calls
-	WorkspaceID               string                // workspace ID used for tests
-	WorkspaceName             string                // name of workspace that was created for test
-	TemplateID                string                // workspace template ID used for tests
-	TestOptions               *TestSchematicOptions // additional testing options
-	TerraformTestStarted      bool                  // keeps track of when actual Terraform resource testing has begin, used for proper test teardown logic
-	TerraformResourcesCreated bool                  // keeps track of when we start deploying resources, used for proper test teardown logic
+	SchematicsApiSvc          SchematicsApiSvcI           // the main schematics service interface
+	ApiAuthenticator          IamAuthenticatorSvcI        // the authenticator used for schematics api calls
+	WorkspaceID               string                      // workspace ID used for tests
+	WorkspaceName             string                      // name of workspace that was created for test
+	WorkspaceLocation         string                      // region the workspace was created in
+	TemplateID                string                      // workspace template ID used for tests
+	TestOptions               *TestSchematicOptions       // additional testing options
+	TerraformTestStarted      bool                        // keeps track of when actual Terraform resource testing has begun, used for proper test teardown logic
+	TerraformResourcesCreated bool                        // keeps track of when we start deploying resources, used for proper test teardown logic
+	CloudInfoService          cloudinfo.CloudInfoServiceI // reference to a CloudInfoService resource
 }
 
 // CreateAuthenticator will accept a valid IBM cloud API key, and
@@ -104,8 +107,26 @@ func (svc *SchematicsTestService) GetRefreshToken() (string, error) {
 // for schematicsv1 and assign it to a property of the receiver for later use.
 func (svc *SchematicsTestService) InitializeSchematicsService() error {
 	var err error
+	var getUrlErr error
+	var schematicsURL string // will default to empty which is ok
+
+	// if a URL override was not provided, determine the correct one from the workspace region that was chosen
+	if len(svc.TestOptions.SchematicsApiURL) > 0 {
+		schematicsURL = svc.TestOptions.SchematicsApiURL
+	} else {
+		if len(svc.WorkspaceLocation) > 0 {
+			schematicsURL, getUrlErr = cloudinfo.GetSchematicServiceURLForRegion(svc.WorkspaceLocation)
+			if getUrlErr != nil {
+				return fmt.Errorf("error getting schematics URL for region %s - %w", svc.WorkspaceLocation, getUrlErr)
+			}
+		} else {
+			schematicsURL = schematics.DefaultServiceURL
+		}
+	}
+	svc.TestOptions.Testing.Logf("[SCHEMATICS] Schematics API for region %s: %s", svc.WorkspaceLocation, schematicsURL)
+
 	svc.SchematicsApiSvc, err = schematics.NewSchematicsV1(&schematics.SchematicsV1Options{
-		URL:           svc.TestOptions.SchematicsApiURL,
+		URL:           schematicsURL,
 		Authenticator: svc.ApiAuthenticator,
 	})
 	if err != nil {
@@ -116,19 +137,20 @@ func (svc *SchematicsTestService) InitializeSchematicsService() error {
 }
 
 // CreateTestWorkspace will create a new IBM Schematics Workspace that will be used for testing.
-func (svc *SchematicsTestService) CreateTestWorkspace(name string, resourceGroup string, templateFolder string, terraformVersion string, tags []string) (*schematics.WorkspaceResponse, error) {
+func (svc *SchematicsTestService) CreateTestWorkspace(name string, resourceGroup string, region string, templateFolder string, terraformVersion string, tags []string) (*schematics.WorkspaceResponse, error) {
 
 	var folder *string
 	var version *string
 	var wsVersion []string
-	// choose nil default for version if not supplied, so that they omit from template setup
-	// (schematics should then determine defaults)
+
 	if len(templateFolder) == 0 {
 		folder = core.StringPtr(".")
 	} else {
 		folder = core.StringPtr(templateFolder)
 	}
 
+	// choose nil default for version if not supplied, so that they omit from template setup
+	// (schematics should then determine defaults)
 	if len(terraformVersion) > 0 {
 		version = core.StringPtr(terraformVersion)
 		wsVersion = []string{terraformVersion}
@@ -164,7 +186,7 @@ func (svc *SchematicsTestService) CreateTestWorkspace(name string, resourceGroup
 		Name:          core.StringPtr(name),
 		TemplateData:  []schematics.TemplateSourceDataRequest{*templateModel},
 		Type:          wsVersion,
-		Location:      core.StringPtr(defaultRegion),
+		Location:      core.StringPtr(region),
 		ResourceGroup: core.StringPtr(resourceGroup),
 		Tags:          tags,
 	}
diff --git a/testschematic/schematics_test.go b/testschematic/schematics_test.go
index a4eaf8a7..931d4a05 100644
--- a/testschematic/schematics_test.go
+++ b/testschematic/schematics_test.go
@@ -63,14 +63,14 @@ func TestSchematicCreateWorkspace(t *testing.T) {
 	mockErrorType := new(schematicErrorMock)
 
 	t.Run("WorkspaceCreated", func(t *testing.T) {
-		result, err := svc.CreateTestWorkspace("good", "any-rg", ".", "terraform_v1.2", []string{"tag1", "tag2"})
+		result, err := svc.CreateTestWorkspace("good", "any-rg", "us-south", ".", "terraform_v1.2", []string{"tag1", "tag2"})
 		if assert.NoError(t, err) {
 			assert.Equal(t, mockWorkspaceID, *result.ID)
 		}
 	})
 
 	t.Run("WorkspaceCreatedEmptyDefaults", func(t *testing.T) {
-		result, err := svc.CreateTestWorkspace("good", "any-rg", "", "", []string{"tag1", "tag2"})
+		result, err := svc.CreateTestWorkspace("good", "any-rg", "", "", "", []string{"tag1", "tag2"})
 		if assert.NoError(t, err) {
 			assert.Equal(t, mockWorkspaceID, *result.ID)
 		}
@@ -78,7 +78,7 @@ func TestSchematicCreateWorkspace(t *testing.T) {
 
 	t.Run("ExternalServiceError", func(t *testing.T) {
 		schematicSvc.failCreateWorkspace = true
-		_, err := svc.CreateTestWorkspace("error", "any-rg", ".", "terraform_v1.2", []string{"tag1"})
+		_, err := svc.CreateTestWorkspace("error", "any-rg", "us-south", ".", "terraform_v1.2", []string{"tag1"})
 		assert.ErrorAs(t, err, &mockErrorType)
 	})
 }
diff --git a/testschematic/test_options.go b/testschematic/test_options.go
index 7afa8bab..1a64028e 100644
--- a/testschematic/test_options.go
+++ b/testschematic/test_options.go
@@ -20,7 +20,6 @@ const ibmcloudApiKeyVar = "TF_VAR_ibmcloud_api_key"
 const defaultGitUserEnvKey = "GIT_TOKEN_USER"
 const defaultGitTokenEnvKey = "GIT_TOKEN"
 const DefaultWaitJobCompleteMinutes = int16(120) // default 2 hrs wait time
-const DefaultSchematicsApiURL = "https://schematics.cloud.ibm.com"
 
 // TestSchematicOptions is the main data struct containing all options related to running a Terraform unit test wihtin IBM Schematics Workspaces
 type TestSchematicOptions struct {
@@ -55,6 +54,10 @@ type TestSchematicOptions struct {
 	// If left empty, this will be populated by dynamic region selection by default constructor and can be referenced later.
 	Region string
 
+	// Set this value to force a specific region for the Schematics Workspace.
+	// If left empty, a random valid Schematics location will be chosen for the workspace.
+	WorkspaceLocation string
+
 	// Only required if using the WithVars constructor, as this value will then populate the `resource_group` input variable.
 	ResourceGroup string
 
@@ -85,13 +88,16 @@ type TestSchematicOptions struct {
 	WaitJobCompleteMinutes int16
 
 	// Base URL of the schematics REST API. Set to override default.
-	// Default: https://schematics.cloud.ibm.com
+	// Default will be based on the appropriate endpoint for the chosen `WorkspaceLocation`
 	SchematicsApiURL string
 
 	// Set this to true if you would like to delete the test Schematic Workspace if the test fails.
 	// By default this will be false, and if a failure happens the workspace and logs will be preserved for analysis.
 	DeleteWorkspaceOnFail bool
 
+	// Set this to true to skip test teardown (both resource destroy and workspace deletion).
+	SkipTestTearDown bool
+
 	// This value is used to set the terraform version attribute for the workspace and template.
 	// If left empty, an empty value will be set in the template which will cause the Schematic jobs to use the highest available version.
 	//
@@ -112,6 +118,18 @@ type TestSchematicOptions struct {
 	SchematicsApiSvc  SchematicsApiSvcI           // OPTIONAL: service pointer for interacting with external schematics api
 	schematicsTestSvc *SchematicsTestService      // internal property to specify pointer to test service, used for test mocking
 
+	// For Consistency Checks: Specify terraform resource names to ignore for consistency checks.
+	// You can ignore specific resources in both idempotent and upgrade consistency checks by adding their names to these
+	// lists. There are separate lists for adds, updates, and destroys.
+	//
+	// This can be useful if you have resources like `null_resource` that are marked with a lifecycle that causes a refresh on every run.
+	// Normally this would fail a consistency check but can be ignored by adding to one of these lists.
+	//
+	// Name format is terraform style, for example: `module.some_module.null_resource.foo`
+	IgnoreAdds     testhelper.Exemptions
+	IgnoreDestroys testhelper.Exemptions
+	IgnoreUpdates  testhelper.Exemptions
+
 	// These optional fields can be used to override the default retry settings for making Schematics API calls.
 	// If SDK/API calls to Schematics result in errors, such as retrieving existing workspace details,
 	// the test framework will retry those calls for a set number of times, with a wait time between calls.
@@ -121,6 +139,10 @@ type TestSchematicOptions struct {
-	// Current Default: 5 retries, 5 second wait
+	// Current Default: 5 retries, 30 second wait
 	SchematicSvcRetryCount       *int
 	SchematicSvcRetryWaitSeconds *int
+
+	// By default the logs from schematics jobs will only be printed to the test log if there is a failure in the job.
+	// Set this value to `true` to have all schematics job logs (plan/apply/destroy) printed to the test log.
+	PrintAllSchematicsLogs bool
 }
 
 type TestSchematicTerraformVar struct {
@@ -143,6 +165,17 @@ type WorkspaceEnvironmentVariable struct {
 	Secure bool   // metadata to mark value as sensitive
 }
 
+// GetCheckConsistencyOptions returns the consistency check options for this test, supporting the consistency check options interface
+func (options *TestSchematicOptions) GetCheckConsistencyOptions() *testhelper.CheckConsistencyOptions {
+	return &testhelper.CheckConsistencyOptions{
+		Testing:        options.Testing,
+		IgnoreAdds:     options.IgnoreAdds,
+		IgnoreDestroys: options.IgnoreDestroys,
+		IgnoreUpdates:  options.IgnoreUpdates,
+		IsUpgradeTest:  false,
+	}
+}
+
 // TestSchematicOptionsDefault is a constructor for struct TestSchematicOptions. This function will accept an existing instance of
 // TestSchematicOptions values, and return a new instance of TestSchematicOptions with the original values set along with appropriate
 // default values for any properties that were not set in the original options.
@@ -185,10 +218,6 @@ func TestSchematicOptionsDefault(originalOptions *TestSchematicOptions) *TestSch
 		newOptions.WaitJobCompleteMinutes = DefaultWaitJobCompleteMinutes
 	}
 
-	if len(newOptions.SchematicsApiURL) == 0 {
-		newOptions.SchematicsApiURL = DefaultSchematicsApiURL
-	}
-
 	return newOptions
 
 }
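A hedged usage sketch of the new options follows (not part of this change): the prefix, resource group, and exemption values are hypothetical, the field layout of testhelper.Exemptions is assumed, and required inputs such as the terraform variables and tar include patterns are omitted.

package mymodule_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/testhelper"
	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/testschematic"
)

func TestRunInSchematics(t *testing.T) {
	options := testschematic.TestSchematicOptionsDefault(&testschematic.TestSchematicOptions{
		Testing:                t,
		Prefix:                 "rg-schematic",            // hypothetical prefix
		ResourceGroup:          "geretain-test-resources", // hypothetical resource group
		WorkspaceLocation:      "us-south",                // pin the workspace region; leave empty for a random location
		PrintAllSchematicsLogs: true,                      // stream plan/apply/destroy logs even when jobs succeed
		DeleteWorkspaceOnFail:  false,                     // keep the workspace for debugging if the test fails
		// exempt a resource from the consistency plan check (Exemptions field layout assumed)
		IgnoreUpdates: testhelper.Exemptions{List: []string{"module.my_module.null_resource.example"}},
	})

	// terraform variables, tar include patterns, and other required inputs are omitted from this sketch
	err := options.RunSchematicTest()
	assert.NoError(t, err)
}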
diff --git a/testschematic/tests.go b/testschematic/tests.go
index 83a35e9a..d2dd7460 100644
--- a/testschematic/tests.go
+++ b/testschematic/tests.go
@@ -5,8 +5,11 @@ import (
 	"os"
 	"strings"
 
+	"github.com/gruntwork-io/terratest/modules/terraform"
 	"github.com/stretchr/testify/assert"
+	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/cloudinfo"
 	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/common"
+	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/testhelper"
 )
 
 // RunSchematicTest will use the supplied options to run an end-to-end Terraform test of a project in an
@@ -24,32 +27,25 @@ func (options *TestSchematicOptions) RunSchematicTest() error {
 	// Any errors in this section will be considerd "unexpected" and returned to the calling unit test
 	// to short-circuit and quit the test.
 	// The official start of the unit test, with assertions, will begin AFTER workspace is properly created.
-
-	// create new schematic service with authenticator, set pointer of service in options for use later
-	var svc *SchematicsTestService
-	if options.schematicsTestSvc == nil {
-		svc = &SchematicsTestService{}
-	} else {
-		svc = options.schematicsTestSvc
+	svc, setupErr := testSetup(options)
+	if setupErr != nil {
+		return setupErr
 	}
-	svc.TestOptions = options
+
 	svc.TerraformTestStarted = false
 	svc.TerraformResourcesCreated = false
 
-	// create IAM authenticator if needed
-	if svc.ApiAuthenticator == nil {
-		svc.CreateAuthenticator(options.RequiredEnvironmentVars[ibmcloudApiKeyVar])
-	}
-
-	// create external API service if needed
-	if options.SchematicsApiSvc != nil {
-		svc.SchematicsApiSvc = options.SchematicsApiSvc
-	} else {
-		svcErr := svc.InitializeSchematicsService()
-		if svcErr != nil {
-			return fmt.Errorf("error creating schematics sdk service: %w", svcErr)
+	// PANIC CATCH and TEAR DOWN
+	// This defer will set up two things:
+	// 1. a panic recovery, so that remaining tests can continue if this test panics
+	// 2. the teardown, which runs after the test completes normally or after a panic
+	defer func() {
+		if r := recover(); r != nil {
+			fmt.Println("=== RECOVER FROM PANIC ===")
+			options.Testing.Errorf("Recovered from panic: %v", r)
 		}
-	}
+		testTearDown(svc, options)
+	}()
 
 	// get the root path of this project
 	projectPath, pathErr := common.GitRootPath(".")
@@ -67,7 +63,7 @@ func (options *TestSchematicOptions) RunSchematicTest() error {
 
 	// create a new empty workspace, resulting in "draft" status
 	options.Testing.Log("[SCHEMATICS] Creating Test Workspace")
-	_, wsErr := svc.CreateTestWorkspace(options.Prefix, options.ResourceGroup, options.TemplateFolder, options.TerraformVersion, options.Tags)
+	_, wsErr := svc.CreateTestWorkspace(options.Prefix, options.ResourceGroup, svc.WorkspaceLocation, options.TemplateFolder, options.TerraformVersion, options.Tags)
 	if wsErr != nil {
 		return fmt.Errorf("error creating new schematic workspace: %w", wsErr)
 	}
@@ -76,9 +72,6 @@ func (options *TestSchematicOptions) RunSchematicTest() error {
 	// can be used in error messages to repeat workspace name
 	workspaceNameString := fmt.Sprintf("[ %s (%s) ]", svc.WorkspaceName, svc.WorkspaceID)
 
-	// since workspace is now created, always call the teardown to remove
-	defer testTearDown(svc, options)
-
 	// upload the terraform code
 	options.Testing.Log("[SCHEMATICS] Uploading TAR file")
 	uploadErr := svc.UploadTarToWorkspace(tarballName)
@@ -119,16 +112,25 @@ func (options *TestSchematicOptions) RunSchematicTest() error {
 	svc.TerraformTestStarted = true
 
 	// ------ PLAN ------
+	planSuccess := false // will only flip to true if job completes
 	planResponse, planErr := svc.CreatePlanJob()
 	if assert.NoErrorf(options.Testing, planErr, "error creating PLAN - %s", workspaceNameString) {
 		options.Testing.Log("[SCHEMATICS] Starting PLAN job ...")
 		planJobStatus, planStatusErr := svc.WaitForFinalJobStatus(*planResponse.Activityid)
 		if assert.NoErrorf(options.Testing, planStatusErr, "error waiting for PLAN to finish - %s", workspaceNameString) {
-			assert.Equalf(options.Testing, SchematicsJobStatusCompleted, planJobStatus, "PLAN has failed with status %s - %s", planJobStatus, workspaceNameString)
+			planSuccess = assert.Equalf(options.Testing, SchematicsJobStatusCompleted, planJobStatus, "PLAN has failed with status %s - %s", planJobStatus, workspaceNameString)
+		}
+
+		if !planSuccess || options.PrintAllSchematicsLogs {
+			printPlanLogErr := svc.printWorkspaceJobLogToTestLog(*planResponse.Activityid, "PLAN")
+			if printPlanLogErr != nil {
+				options.Testing.Logf("Error printing PLAN logs:%s", printPlanLogErr)
+			}
 		}
 	}
 
 	// ------ APPLY ------
+	applySuccess := false // will only flip to true if job completes
 	if !options.Testing.Failed() {
 		applyResponse, applyErr := svc.CreateApplyJob()
 		if assert.NoErrorf(options.Testing, applyErr, "error creating APPLY - %s", workspaceNameString) {
@@ -138,25 +140,46 @@ func (options *TestSchematicOptions) RunSchematicTest() error {
 
 			applyJobStatus, applyStatusErr := svc.WaitForFinalJobStatus(*applyResponse.Activityid)
 			if assert.NoErrorf(options.Testing, applyStatusErr, "error waiting for APPLY to finish - %s", workspaceNameString) {
-				assert.Equalf(options.Testing, SchematicsJobStatusCompleted, applyJobStatus, "APPLY has failed with status %s - %s", applyJobStatus, workspaceNameString)
+				applySuccess = assert.Equalf(options.Testing, SchematicsJobStatusCompleted, applyJobStatus, "APPLY has failed with status %s - %s", applyJobStatus, workspaceNameString)
+			}
+
+			if !applySuccess || options.PrintAllSchematicsLogs {
+				printApplyLogErr := svc.printWorkspaceJobLogToTestLog(*applyResponse.Activityid, "APPLY")
+				if printApplyLogErr != nil {
+					options.Testing.Logf("Error printing APPLY logs:%s", printApplyLogErr)
+				}
 			}
 		}
 	}
 
-	// ------ DESTROY ------
-	// only run destroy if we had potentially created resources
-	if svc.TerraformResourcesCreated {
-		// Check if "DO_NOT_DESTROY_ON_FAILURE" is set
-		envVal, _ := os.LookupEnv("DO_NOT_DESTROY_ON_FAILURE")
-		if options.Testing.Failed() && strings.ToLower(envVal) == "true" {
-			options.Testing.Log("[SCHEMATICS] Schematics APPLY failed. Debug the Test and delete resources manually.")
-		} else {
-			destroyResponse, destroyErr := svc.CreateDestroyJob()
-			if assert.NoErrorf(options.Testing, destroyErr, "error creating DESTROY - %s", workspaceNameString) {
-				options.Testing.Log("[SCHEMATICS] Starting DESTROY job ...")
-				destroyJobStatus, destroyStatusErr := svc.WaitForFinalJobStatus(*destroyResponse.Activityid)
-				if assert.NoErrorf(options.Testing, destroyStatusErr, "error waiting for DESTROY to finish - %s", workspaceNameString) {
-					assert.Equalf(options.Testing, SchematicsJobStatusCompleted, destroyJobStatus, "DESTROY has failed with status %s - %s", destroyJobStatus, workspaceNameString)
+	// ------ CONSISTENCY PLAN ------
+	consistencyPlanSuccess := false // will only flip to true if the consistency check passes
+	if !options.Testing.Failed() {
+		consistencyPlanResponse, consistencyPlanErr := svc.CreatePlanJob()
+		if assert.NoErrorf(options.Testing, consistencyPlanErr, "error creating CONSISTENCY PLAN - %s", workspaceNameString) {
+			options.Testing.Log("[SCHEMATICS] Starting CONSISTENCY PLAN job ...")
+			consistencyPlanJobStatus, consistencyPlanStatusErr := svc.WaitForFinalJobStatus(*consistencyPlanResponse.Activityid)
+			if assert.NoErrorf(options.Testing, consistencyPlanStatusErr, "error waiting for CONSISTENCY PLAN to finish - %s", workspaceNameString) {
+				if assert.Equalf(options.Testing, SchematicsJobStatusCompleted, consistencyPlanJobStatus, "CONSISTENCY PLAN has failed with status %s - %s", consistencyPlanJobStatus, workspaceNameString) {
+					// if the consistency plan was successful, get the plan json and check consistency
+					consistencyPlanJson, consistencyPlanJsonErr := svc.TestOptions.CloudInfoService.GetSchematicsJobPlanJson(*consistencyPlanResponse.Activityid, svc.WorkspaceLocation)
+					if assert.NoErrorf(options.Testing, consistencyPlanJsonErr, "error retrieving CONSISTENCY PLAN JSON - %s - %s", consistencyPlanJsonErr, workspaceNameString) {
+						// convert the json string into a terratest plan struct
+						planStruct, planStructErr := terraform.ParsePlanJSON(consistencyPlanJson)
+						if assert.NoErrorf(options.Testing, planStructErr, "error converting plan string into struct: %s - %s", planStructErr, workspaceNameString) {
+							// base success not on job completion, but on whether the consistency check finds any problems
+							// CheckConsistency returns TRUE if it finds issues, so we will negate that for success
+							foundConsistencyIssues := testhelper.CheckConsistency(planStruct, options)
+							consistencyPlanSuccess = !foundConsistencyIssues
+						}
+					}
+				}
+			}
+
+			if !consistencyPlanSuccess || options.PrintAllSchematicsLogs {
+				printConsistencyLogErr := svc.printWorkspaceJobLogToTestLog(*consistencyPlanResponse.Activityid, "CONSISTENCY PLAN")
+				if printConsistencyLogErr != nil {
+					options.Testing.Logf("Error printing PLAN logs:%s", printConsistencyLogErr)
 				}
 			}
 		}
@@ -165,23 +188,153 @@ func (options *TestSchematicOptions) RunSchematicTest() error {
 	return nil
 }
 
+// testSetup is a helper function that will initialize and set up the SchematicsTestService in preparation for a test.
+// Any errors in this section will be considered "unexpected" and returned to the calling unit test
+// to short-circuit and quit the test.
+func testSetup(options *TestSchematicOptions) (*SchematicsTestService, error) {
+	// create new schematic service with authenticator, set pointer of service in options for use later
+	var svc *SchematicsTestService
+	if options.schematicsTestSvc == nil {
+		svc = &SchematicsTestService{}
+	} else {
+		svc = options.schematicsTestSvc
+	}
+
+	svc.TestOptions = options
+
+	// create new CloudInfoService if not supplied
+	if options.CloudInfoService == nil {
+		cloudInfoSvc, cloudInfoErr := cloudinfo.NewCloudInfoServiceFromEnv("TF_VAR_ibmcloud_api_key", cloudinfo.CloudInfoServiceOptions{})
+		if cloudInfoErr != nil {
+			return nil, cloudInfoErr
+		}
+		svc.CloudInfoService = cloudInfoSvc
+		options.CloudInfoService = cloudInfoSvc
+	} else {
+		svc.CloudInfoService = options.CloudInfoService
+	}
+
+	// pick the workspace region: use the supplied location if one was specified,
+	// otherwise choose a random valid Schematics location
+	if len(options.WorkspaceLocation) > 0 {
+		svc.WorkspaceLocation = options.WorkspaceLocation
+	} else {
+		svc.WorkspaceLocation = cloudinfo.GetRandomSchematicsLocation()
+		svc.TestOptions.Testing.Logf("[SCHEMATICS] Random Workspace region chosen: %s", svc.WorkspaceLocation)
+	}
+
+	// create IAM authenticator if needed
+	if svc.ApiAuthenticator == nil {
+		svc.CreateAuthenticator(options.RequiredEnvironmentVars[ibmcloudApiKeyVar])
+	}
+
+	// create external API service if needed
+	if options.SchematicsApiSvc != nil {
+		svc.SchematicsApiSvc = options.SchematicsApiSvc
+	} else {
+		svcErr := svc.InitializeSchematicsService()
+		if svcErr != nil {
+			return nil, fmt.Errorf("error creating schematics sdk service: %w", svcErr)
+		}
+	}
+
+	return svc, nil
+}
+
 // testTearDown is a helper function, typically called via golang "defer", that will clean up and remove any existing resources that were
 // created for the test.
 // The removal of some resources may be influenced by certain conditions or optional settings.
 func testTearDown(svc *SchematicsTestService, options *TestSchematicOptions) {
-	// ------ DELETE WORKSPACE ------
-	// only delete workspace if one of these is true:
-	// * terraform hasn't been started yet
-	// * no failures
-	// * failed and DeleteWorkspaceOnFail is true
-	if !svc.TerraformTestStarted ||
-		!options.Testing.Failed() ||
-		(options.Testing.Failed() && options.DeleteWorkspaceOnFail) {
-
-		options.Testing.Log("[SCHEMATICS] Deleting Workspace")
-		_, deleteWsErr := svc.DeleteWorkspace()
-		if deleteWsErr != nil {
-			options.Testing.Logf("[SCHEMATICS] WARNING: Schematics WORKSPACE DELETE failed! Remove manually if required. Name: %s (%s)", svc.WorkspaceName, svc.WorkspaceID)
+
+	// PANIC CATCH and TEAR DOWN
+	// if there is a panic during resource destroy, recover and fail the test, but do not continue with the rest of the teardown
+	defer func() {
+		if r := recover(); r != nil {
+			fmt.Println("=== RECOVER FROM PANIC IN testschematic.testTearDown() ===")
+			options.Testing.Error("Panic recovery during schematics teardown")
+		}
+	}()
+
+	// only perform if skip is not set
+	if !options.SkipTestTearDown {
+		// ------ DESTROY RESOURCES ------
+		// only run destroy if we had potentially created resources
+		if svc.TerraformResourcesCreated {
+			// Once we enter this block, set TerraformResourcesCreated to false to prevent
+			// this part from running again if a panic causes teardown to execute a 2nd time
+			svc.TerraformResourcesCreated = false
+
+			// Check if "DO_NOT_DESTROY_ON_FAILURE" is set
+			envVal, _ := os.LookupEnv("DO_NOT_DESTROY_ON_FAILURE")
+			if options.Testing.Failed() && strings.ToLower(envVal) == "true" {
+				options.Testing.Log("[SCHEMATICS] Schematics APPLY failed. Debug the Test and delete resources manually.")
+			} else {
+				destroySuccess := false // will only flip to true if job completes
+				destroyResponse, destroyErr := svc.CreateDestroyJob()
+				if assert.NoErrorf(options.Testing, destroyErr, "error creating DESTROY - %s", svc.WorkspaceName) {
+					options.Testing.Log("[SCHEMATICS] Starting DESTROY job ...")
+					destroyJobStatus, destroyStatusErr := svc.WaitForFinalJobStatus(*destroyResponse.Activityid)
+					if assert.NoErrorf(options.Testing, destroyStatusErr, "error waiting for DESTROY to finish - %s", svc.WorkspaceName) {
+						destroySuccess = assert.Equalf(options.Testing, SchematicsJobStatusCompleted, destroyJobStatus, "DESTROY has failed with status %s - %s", destroyJobStatus, svc.WorkspaceName)
+					}
+
+					if !destroySuccess || options.PrintAllSchematicsLogs {
+						printDestroyLogErr := svc.printWorkspaceJobLogToTestLog(*destroyResponse.Activityid, "DESTROY")
+						if printDestroyLogErr != nil {
+							options.Testing.Logf("Error printing DESTROY logs:%s", printDestroyLogErr)
+						}
+					}
+				}
+			}
+		}
+
+		// only attempt to delete workspace if it was created (valid workspace id)
+		if len(svc.WorkspaceID) > 0 {
+			// ------ DELETE WORKSPACE ------
+			// only delete workspace if one of these is true:
+			// * terraform hasn't been started yet
+			// * no failures
+			// * failed and DeleteWorkspaceOnFail is true
+			if !svc.TerraformTestStarted ||
+				!options.Testing.Failed() ||
+				(options.Testing.Failed() && options.DeleteWorkspaceOnFail) {
+
+				options.Testing.Log("[SCHEMATICS] Deleting Workspace")
+				_, deleteWsErr := svc.DeleteWorkspace()
+				if deleteWsErr != nil {
+					options.Testing.Logf("[SCHEMATICS] WARNING: Schematics WORKSPACE DELETE failed! Remove manually if required. Name: %s (%s)", svc.WorkspaceName, svc.WorkspaceID)
+				}
+			}
 		}
 	}
 }
+
+// SPECIAL NOTE: We do not want to fail the test if there is any issue retrieving or printing a log.
+// In this function we capture most errors and simply short-circuit, returning
+// the error to the caller, to avoid any panic or test failure.
+func (svc *SchematicsTestService) printWorkspaceJobLogToTestLog(jobID string, jobType string) error {
+
+	// if for some reason cloudInfo has not been initialized, return immediately
+	if svc.CloudInfoService == nil {
+		return fmt.Errorf("could not get workspace logs, CloudInfoService was not initialized which is unexpected - JobID %s", jobID)
+	}
+
+	// retrieve job log
+	jobLog, jobLogErr := svc.CloudInfoService.GetSchematicsJobLogsText(jobID, svc.WorkspaceLocation)
+	if jobLogErr != nil {
+		return jobLogErr
+	}
+	if len(jobLog) == 0 {
+		return fmt.Errorf("workspace job log was empty which is unexpected - JobID %s", jobID)
+	}
+
+	// create some headers and footers
+	logHeader := fmt.Sprintf("=============== BEGIN %s JOB LOG (%s) ===============", strings.ToUpper(jobType), svc.WorkspaceID)
+	logFooter := fmt.Sprintf("=============== END %s JOB LOG (%s) ===============", strings.ToUpper(jobType), svc.WorkspaceID)
+	finalLog := fmt.Sprintf("%s\n%s\n%s", logHeader, jobLog, logFooter)
+
+	// print out log text
+	svc.TestOptions.Testing.Log(finalLog)
+
+	return nil
+}
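To summarize how the new consistency step fits together, here is a hypothetical helper (not part of this change) that condenses the calls used above: fetch the plan JSON for a finished plan job, parse it with terratest, and run the exemption-aware consistency check.

package testschematic

import (
	"fmt"

	"github.com/gruntwork-io/terratest/modules/terraform"
	"github.com/terraform-ibm-modules/ibmcloud-terratest-wrapper/testhelper"
)

// checkConsistencyForJob is a hypothetical helper showing the consistency flow in one place.
// It returns true if the plan produced by the given job contains no unexpected changes.
func (svc *SchematicsTestService) checkConsistencyForJob(jobID string) (bool, error) {
	planJSON, err := svc.CloudInfoService.GetSchematicsJobPlanJson(jobID, svc.WorkspaceLocation)
	if err != nil {
		return false, fmt.Errorf("error retrieving plan JSON for job %s: %w", jobID, err)
	}

	planStruct, err := terraform.ParsePlanJSON(planJSON)
	if err != nil {
		return false, fmt.Errorf("error parsing plan JSON for job %s: %w", jobID, err)
	}

	// CheckConsistency returns true when it finds adds/updates/destroys that are not
	// covered by the Ignore* exemption lists on the options struct
	foundIssues := testhelper.CheckConsistency(planStruct, svc.TestOptions)
	return !foundIssues, nil
}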
diff --git a/testschematic/tests_test.go b/testschematic/tests_test.go
index ef31a622..d518dbfe 100644
--- a/testschematic/tests_test.go
+++ b/testschematic/tests_test.go
@@ -17,8 +17,6 @@ func TestSchematicFullTest(t *testing.T) {
 	svc := &SchematicsTestService{
 		SchematicsApiSvc: schematicSvc,
 		ApiAuthenticator: authSvc,
-		WorkspaceID:      mockWorkspaceID,
-		TemplateID:       mockTemplateID,
 	}
 	//mockErrorType := new(schematicv1ErrorMock)
 	zero := 0
@@ -41,6 +39,7 @@ func TestSchematicFullTest(t *testing.T) {
 		schematicsTestSvc:            svc,
 		SchematicSvcRetryCount:       &zero,
 		SchematicSvcRetryWaitSeconds: &zero,
+		CloudInfoService:             &cloudInfoServiceMock{},
 	}
 
 	// mock at least one good tar upload and one other completed activity
@@ -53,12 +52,17 @@ func TestSchematicFullTest(t *testing.T) {
 
 	t.Run("CleanRun", func(t *testing.T) {
 		err := options.RunSchematicTest()
-		assert.NoError(t, err)
+		assert.NoError(t, err, "error:%s", err)
 		assert.True(t, schematicSvc.applyComplete)
 		assert.True(t, schematicSvc.destroyComplete)
 		assert.True(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("WorkspaceCreateFail", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		options.DeleteWorkspaceOnFail = false // shouldn't matter
@@ -69,6 +73,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.False(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("WorkspaceSetupFail", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		schematicSvc.failReplaceWorkspaceInputs = true // after workspace create but before terraform
@@ -79,6 +88,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.True(t, schematicSvc.workspaceDeleteComplete) // delete workspace on fail if terraform isn't started
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("PlanFailedLeaveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		schematicSvc.failPlanWorkspaceCommand = true
@@ -89,6 +103,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.False(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("PlanFailedRemoveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		schematicSvc.failPlanWorkspaceCommand = true
@@ -99,6 +118,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.True(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("ApplyCreateFailedRemoveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		schematicSvc.failApplyWorkspaceCommand = true
@@ -109,6 +133,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.True(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("ApplyCreateFailedLeaveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		schematicSvc.failApplyWorkspaceCommand = true
@@ -119,6 +148,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.False(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("DestroyCreateFailedLeaveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		schematicSvc.failDestroyWorkspaceCommand = true
@@ -129,6 +163,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.False(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	// set apply to failed
 	schematicSvc.activities = []schematics.WorkspaceActivity{
 		{ActionID: core.StringPtr(mockActivityID), Name: core.StringPtr(SchematicsJobTypeUpload), PerformedAt: conv.DateTime(strfmt.DateTime(time.Now().Add(-time.Second * 5))), Status: core.StringPtr(SchematicsJobStatusCompleted)},
@@ -137,6 +176,11 @@ func TestSchematicFullTest(t *testing.T) {
 		{ActionID: core.StringPtr(mockDestroyID), Name: core.StringPtr("TEST-DESTROY-JOB"), PerformedAt: conv.DateTime(strfmt.DateTime(time.Now().Add(-time.Second * 2))), Status: core.StringPtr(SchematicsJobStatusCompleted)},
 	}
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("ApplyTerraformFailedLeaveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		options.DeleteWorkspaceOnFail = false
@@ -146,6 +190,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.False(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("ApplyTerraformFailedRemoveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		options.DeleteWorkspaceOnFail = true
@@ -155,6 +204,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.True(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	// set destroy to failed
 	schematicSvc.activities = []schematics.WorkspaceActivity{
 		{ActionID: core.StringPtr(mockActivityID), Name: core.StringPtr(SchematicsJobTypeUpload), PerformedAt: conv.DateTime(strfmt.DateTime(time.Now().Add(-time.Second * 5))), Status: core.StringPtr(SchematicsJobStatusCompleted)},
@@ -163,6 +217,11 @@ func TestSchematicFullTest(t *testing.T) {
 		{ActionID: core.StringPtr(mockDestroyID), Name: core.StringPtr("TEST-DESTROY-JOB"), PerformedAt: conv.DateTime(strfmt.DateTime(time.Now().Add(-time.Second * 2))), Status: core.StringPtr(SchematicsJobStatusFailed)},
 	}
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("DestroyTerraformFailedLeaveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		options.DeleteWorkspaceOnFail = false
@@ -172,6 +231,11 @@ func TestSchematicFullTest(t *testing.T) {
 		assert.False(t, schematicSvc.workspaceDeleteComplete)
 	})
 
+	options.schematicsTestSvc = &SchematicsTestService{
+		SchematicsApiSvc: schematicSvc,
+		ApiAuthenticator: authSvc,
+	}
+
 	t.Run("DestroyTerraformFailedRemoveWorkspace", func(t *testing.T) {
 		mockSchematicServiceReset(schematicSvc, options)
 		options.DeleteWorkspaceOnFail = true