diff --git a/deploy/terraform-datacommons-website/examples/website_v1/main.tf b/deploy/terraform-datacommons-website/examples/website_v1/main.tf
index 69a3416e5e..6a1a1a9090 100644
--- a/deploy/terraform-datacommons-website/examples/website_v1/main.tf
+++ b/deploy/terraform-datacommons-website/examples/website_v1/main.tf
@@ -66,6 +66,7 @@ module "apikeys" {
 module "esp" {
   source     = "../../modules/esp"
   project_id = var.project_id
+  mixer_githash = var.mixer_githash
 }
 
 module "cluster" {
@@ -93,33 +94,21 @@ resource "google_compute_managed_ssl_certificate" "dc_website_cert" {
   }
 }
 
-data "google_container_cluster" "dc_web_cluster" {
-  name     = module.cluster.name
-  location = var.region
-  project  = var.project_id
-
-  depends_on = [module.cluster]
-}
-
-data "google_client_config" "default" {}
-
+# IMPORTANT NOTE: This configuration assumes that "~/.kube/config" already
+# exists. This is because a provider cannot depend on data sources or resources:
+# provider blocks must be resolved before resource/data states are fetched.
+# In install_custom_dc.sh, the kubeconfig is currently fetched before calling terraform apply.
+# ~/.kube/config is where the gcloud command for GKE stores the cluster config,
+# which is required to access the cluster, including via helm.
 provider "kubernetes" {
   alias = "datcom"
-  host  = "https://${data.google_container_cluster.dc_web_cluster.endpoint}"
-  token = data.google_client_config.default.access_token
-  cluster_ca_certificate = base64decode(
-    data.google_container_cluster.dc_web_cluster.master_auth[0].cluster_ca_certificate
-  )
+  config_path = "~/.kube/config"
 }
 
 provider "helm" {
   alias = "datcom"
   kubernetes {
-    host  = "https://${data.google_container_cluster.dc_web_cluster.endpoint}"
-    token = data.google_client_config.default.access_token
-    cluster_ca_certificate = base64decode(
-      data.google_container_cluster.dc_web_cluster.master_auth[0].cluster_ca_certificate
-    )
+    config_path = "~/.kube/config"
   }
 }
 
diff --git a/deploy/terraform-datacommons-website/examples/website_v1/outputs.tf b/deploy/terraform-datacommons-website/examples/website_v1/outputs.tf
new file mode 100644
index 0000000000..6eb1179fcc
--- /dev/null
+++ b/deploy/terraform-datacommons-website/examples/website_v1/outputs.tf
@@ -0,0 +1,7 @@
+output "cluster_name" {
+  value = module.cluster.name
+}
+
+output "cluster_region" {
+  value = module.cluster.region
+}
diff --git a/deploy/terraform-datacommons-website/modules/esp/main.tf b/deploy/terraform-datacommons-website/modules/esp/main.tf
index 59bd0717ce..ca580263d8 100644
--- a/deploy/terraform-datacommons-website/modules/esp/main.tf
+++ b/deploy/terraform-datacommons-website/modules/esp/main.tf
@@ -13,26 +13,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-resource "null_resource" "fetch_mixer_grpc_latest_pb" {
-  # Alwways fetch the latest gRPC protobuf.
-  # This makes sure that /tmp/mixer-grpc.latest.pb exists even in re-runs.
-  triggers = {
-    always_run = "${timestamp()}"
-  }
-
-  provisioner "local-exec" {
-    command = "gsutil cp ${var.mixer_grpc_pb_gcs_path} /tmp/mixer-grpc.latest.pb"
-  }
-}
-
 # Needed because file(https://www.terraform.io/language/functions/file)
 # cannot be used for dynamically generated files.
-data "local_file" "mixer_grpc_latest_pb" { - filename = "/tmp/mixer-grpc.latest.pb" - depends_on = [ - null_resource.fetch_mixer_grpc_latest_pb - ] +# Once https://github.com/GoogleCloudPlatform/magic-modules/pull/6895 +# is merged, this can be replaced with google_storage_bucket_object_content so that +# mixer grpc pb can be fetched directly from gcs, as opposed first downloading it locally. +# https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/storage_bucket_object_content +# +# +# Currently this is copied over during terraform runtime in install_custom_dc.sh +# This is because terraform apply expects the file to already exist at the path +# specified below. +# When testing locally, copy over the mixer grpc pb manually into the current +# module folder, like below. (Script below assumes you are currently in module folder). +# gsutil cp \ +# gs://datcom-mixer-grpc/mixer-grpc/mixer-grpc.$MIXER_GITHASH.pb \ +# mixer-grpc.$MIXER_GITHASH.pb +data "local_file" "mixer_grpc_pb" { + filename = "${path.module}/mixer-grpc.${var.mixer_githash}.pb" } # Note: deleted endpoints cannot be re-created. @@ -45,10 +43,10 @@ resource "google_endpoints_service" "mixer_endpoint" { "%SERVICE_NAME%", "website-esp.endpoints.${var.project_id}.cloud.goog"), "%API_TITLE%" , "website-esp.endpoints.${var.project_id}.cloud.goog") - protoc_output_base64 = data.local_file.mixer_grpc_latest_pb.content_base64 + protoc_output_base64 = data.local_file.mixer_grpc_pb.content_base64 depends_on = [ - data.local_file.mixer_grpc_latest_pb + data.local_file.mixer_grpc_pb ] } diff --git a/deploy/terraform-datacommons-website/modules/esp/variables.tf b/deploy/terraform-datacommons-website/modules/esp/variables.tf index 4c22987752..55afdee12b 100644 --- a/deploy/terraform-datacommons-website/modules/esp/variables.tf +++ b/deploy/terraform-datacommons-website/modules/esp/variables.tf @@ -19,8 +19,7 @@ variable "project_id" { description = "GCP project id where the API keys will be created." } -variable "mixer_grpc_pb_gcs_path" { +variable "mixer_githash" { type = string - description = "Full GCS path to mixer's compiled grpc protobuf definition." - default = "gs://datcom-mixer-grpc/mixer-grpc/mixer-grpc.latest.pb" + description = "Mixer githash to be used to fetch the proto file for ESP." } diff --git a/deploy/terraform-datacommons-website/modules/gke/outputs.tf b/deploy/terraform-datacommons-website/modules/gke/outputs.tf index bc28eb9180..d729d9a086 100644 --- a/deploy/terraform-datacommons-website/modules/gke/outputs.tf +++ b/deploy/terraform-datacommons-website/modules/gke/outputs.tf @@ -15,3 +15,7 @@ output "name" { value = local.cluster_name } + +output "region" { + value = var.region +} diff --git a/scripts/install_custom_dc.sh b/scripts/install_custom_dc.sh index 48e9bc2b1f..c403656912 100755 --- a/scripts/install_custom_dc.sh +++ b/scripts/install_custom_dc.sh @@ -14,7 +14,17 @@ # limitations under the License. set -e -CUSTOM_DC_RELEASE_TAG=custom-dc-v0.1.0 +CUSTOM_DC_RELEASE_TAG=custom-dc-v0.2.0 + +# In some environments (such as Cloud Shell), IPv6 is not enabled on the OS. +# This causes problems during terraform runs. Fix is from the issue below. 
+# For more context, see https://github.com/hashicorp/terraform-provider-google/issues/6782
+sudo chmod a+w /etc/hosts
+export APIS="googleapis.com www.googleapis.com storage.googleapis.com iam.googleapis.com container.googleapis.com cloudresourcemanager.googleapis.com"
+for i in $APIS
+do
+  echo "199.36.153.10 $i" >> /etc/hosts
+done
 
 TERRAFORM_PATH=$(which terraform)
 if [[ -n "$TERRAFORM_PATH" ]]; then
@@ -54,6 +64,7 @@ if [ -z "$REGISTER_DOMAIN" ] && [ -z "$CUSTOM_DC_DOMAIN" ]; then
   echo "Error: environment variable CUSTOM_DC_DOMAIN is required because default domain is not used." 1>&2
   echo "Default domain is not used because environment variable REGISTER_DOMAIN is not set." 1>&2
   echo "export CUSTOM_DC_DOMAIN= if you intend to use a domain that you own." 1>&2
+  exit 1
 fi
 
 if [[ -n "$CUSTOM_DC_DOMAIN" ]]; then
@@ -110,6 +121,14 @@ terraform init \
   -backend-config="bucket=$TF_STATE_BUCKET" \
   -backend-config="prefix=website_v1"
 
+gcloud container clusters get-credentials $(terraform output --raw cluster_name) \
+  --region $(terraform output --raw cluster_region) \
+  --project $PROJECT_ID || true
+
+gsutil cp \
+  gs://datcom-mixer-grpc/mixer-grpc/mixer-grpc.$MIXER_GITHASH.pb \
+  $WEBSITE_ROOT/deploy/terraform-datacommons-website/modules/esp/mixer-grpc.$MIXER_GITHASH.pb
+
 # -datacommons.com is the default domain name defined in setup/main.tf
 terraform apply \
   -var="project_id=$PROJECT_ID" \
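
When running terraform apply in examples/website_v1 by hand rather than through install_custom_dc.sh, the two prerequisites this change introduces (a kubeconfig at ~/.kube/config and the mixer gRPC descriptor inside the esp module folder) can be staged manually. Below is a minimal sketch, assuming PROJECT_ID, MIXER_GITHASH, and WEBSITE_ROOT are exported as in the script; CLUSTER_NAME and REGION are hypothetical placeholders here, since the script itself reads them from terraform output.

# Sketch: stage what install_custom_dc.sh normally handles before `terraform apply`.
# Assumes PROJECT_ID, MIXER_GITHASH, WEBSITE_ROOT, CLUSTER_NAME, and REGION are set.
set -e

# Write cluster credentials to ~/.kube/config so the kubernetes and helm
# providers can read them via config_path.
gcloud container clusters get-credentials "$CLUSTER_NAME" \
  --region "$REGION" \
  --project "$PROJECT_ID"

# Place the mixer gRPC descriptor next to modules/esp/main.tf, where
# data "local_file" "mixer_grpc_pb" expects to find it.
gsutil cp \
  "gs://datcom-mixer-grpc/mixer-grpc/mixer-grpc.${MIXER_GITHASH}.pb" \
  "$WEBSITE_ROOT/deploy/terraform-datacommons-website/modules/esp/mixer-grpc.${MIXER_GITHASH}.pb"

# Sanity-check both prerequisites before invoking terraform.
test -s "$HOME/.kube/config"
test -s "$WEBSITE_ROOT/deploy/terraform-datacommons-website/modules/esp/mixer-grpc.${MIXER_GITHASH}.pb"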