Commit 0eba3e7e authored by Adam Robinson
Browse files

rework module to use for_each instead of count

parent f81aa08a
# Local .terraform directories
**/.terraform/*
# .tfstate files
*.tfstate
*.tfstate.*
# Crash log files
crash.log
# Ignore any .tfvars files that are generated automatically for each Terraform run. Most
# .tfvars files are managed as part of configuration and so should be included in
# version control.
#
# example.tfvars
# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
override.tf.json
*_override.tf
*_override.tf.json
# Include override files you do wish to add to version control using negated pattern
#
# !example_override.tf
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
# example: *tfplan*
# Don't publish GCP service account creds
vci-dev-tf-admin-a9b805dc141e.json
# Ignore Mac junk
.DS_Store
\ No newline at end of file
# Pre-0.13 validation hack: when var.project_type is not in var.project_types,
# file() is called with a path that does not exist, which aborts the plan and
# surfaces the "ERROR: ..." string in the error message. When valid, triggers
# is just an empty map.
resource "null_resource" "is_project_type_valid" {
triggers = contains(var.project_types, var.project_type) == true ? {} : file("ERROR: project_type must be one of audit, data, worker")
# triggers carries no real state; ignore drift so this resource never replans.
lifecycle {
ignore_changes = [
triggers
]
}
}
# resource "null_resource" "datalab_user_list_only_workers" {
# triggers = var.datalab_user_list != [] && var.project_type != "worker" ? file("ERROR: datalab_user_list is only for worker projects") : {}
# lifecycle {
# ignore_changes = [
# triggers
# ]
# }
# }
# Pre-0.13 validation hack: fail the plan (via file() on a non-existent path)
# when reader_group_email is empty for a "data" project. The commented line
# shows the earlier rule that also covered worker projects.
resource "null_resource" "is_reader_group_email_valid" {
#triggers = var.reader_group_email == "" && contains(var.project_types_storing_data, var.project_type) == true ? file("ERROR: reader_group_email must be set for worker and data projects") : {}
triggers = var.reader_group_email == "" && var.project_type == "data" ? file("ERROR: reader_group_email must be set for data projects") : {}
# triggers carries no real state; ignore drift so this resource never replans.
lifecycle {
ignore_changes = [
triggers
]
}
}
# Pre-0.13 validation hack: fail the plan (via file() on a non-existent path)
# when writer_group_email is empty for a project type that stores data
# (worker/data, per var.project_types_storing_data).
resource "null_resource" "is_writer_group_email_valid" {
  # contains() already returns a bool; the redundant "== true" is dropped.
  triggers = var.writer_group_email == "" && contains(var.project_types_storing_data, var.project_type) ? file("ERROR: writer_group_email must be set for worker and data projects") : {}

  # triggers carries no real state; ignore drift so this resource never replans.
  lifecycle {
    ignore_changes = [
      triggers
    ]
  }
}
# Pre-0.13 validation hack: every project type except "audit" must supply the
# id of the central audit project; otherwise abort the plan via file() on a
# non-existent path, surfacing the "ERROR: ..." text.
resource "null_resource" "is_audit_project_id_valid" {
  triggers = var.project_type != "audit" && var.audit_project_id == "" ? file("ERROR: audit_project_id must be set for all project types other than audit") : {}

  # triggers is only a validation vehicle; suppress perpetual diffs.
  lifecycle {
    ignore_changes = [
      triggers
    ]
  }
}
# Pre-0.13 validation hack: worker projects must carry a worker_id suffix;
# abort the plan via file() on a non-existent path when it is missing.
resource "null_resource" "is_worker_id_valid" {
  triggers = var.project_type == "worker" && var.worker_id == "" ? file("ERROR: worker_id must be set for worker projects") : {}

  # triggers is only a validation vehicle; suppress perpetual diffs.
  lifecycle {
    ignore_changes = [
      triggers
    ]
  }
}
\ No newline at end of file
# One audit-log BigQuery dataset per project, all hosted in the audit project.
resource "google_bigquery_dataset" "audit_logs" {
  for_each = { for v in local.all_projects : v => v }

  dataset_id  = "${replace(google_project.hipaa_project[each.key].name, "-", "_")}_audit_logs"
  project     = google_project.hipaa_project["audit"].project_id
  description = "${google_project.hipaa_project[each.key].project_id} - Log Export BQ Dataset"
  location    = "US"

  default_table_expiration_ms = 10368000000 # 120 days

  # The sink writer identity is granted WRITER out-of-band (see
  # null_resource.sink_writer_bq_access); granting it here would create a
  # circular dependency, so in-band access changes must be ignored.
  lifecycle {
    # 0.12+ syntax: ignore_changes entries are attribute references, not
    # quoted strings (the quoted form is deprecated and rejected by 0.13+).
    ignore_changes = [access]
  }

  access {
    role           = "OWNER"
    group_by_email = local.owners_group_email
  }
  access {
    role           = "READER"
    group_by_email = local.auditor_group_email
  }

  depends_on = [google_project_service.bq-api]
}
# Export each project's Cloud Audit Logs to its per-project BQ dataset in the
# audit project.
resource "google_logging_project_sink" "audit_logs_bq" {
for_each = { for v in local.all_projects : v => v }
name = "hipaa-audit-logs-to-bq"
project = google_project.hipaa_project[each.key].project_id
destination = "bigquery.googleapis.com/projects/${google_project.hipaa_project["audit"].project_id}/datasets/${google_bigquery_dataset.audit_logs[each.key].dataset_id}"
filter = "logName:logs/cloudaudit.googleapis.com"
# Dedicated per-sink service account; it still needs WRITER on the dataset,
# granted by null_resource.sink_writer_bq_access.
unique_writer_identity = true
}
# Grant each BQ sink's unique writer identity WRITER on its dataset via a
# shell script, since doing it with an access block would create a circular
# dependency. Re-runs whenever the writer identity changes.
resource "null_resource" "sink_writer_bq_access" {
for_each = { for v in local.all_projects : v => v }
triggers = {
writer_identity = google_logging_project_sink.audit_logs_bq[each.key].writer_identity
}
provisioner "local-exec" {
# args: <audit project id> <dataset id> <sink writer identity>
command = "${path.module}/set-sink-writer-bq-access.sh ${google_project.hipaa_project["audit"].project_id} ${google_bigquery_dataset.audit_logs[each.key].dataset_id} ${google_logging_project_sink.audit_logs_bq[each.key].writer_identity}"
}
}
# Export each project's Cloud Audit Logs to its per-project GCS bucket in the
# audit project (google_storage_bucket.audit_logs).
resource "google_logging_project_sink" "audit_logs_gcs" {
for_each = { for v in local.all_projects : v => v }
name = "hipaa-audit-logs-to-gcs"
project = google_project.hipaa_project[each.key].project_id
filter = "logName:logs/cloudaudit.googleapis.com"
destination = "storage.googleapis.com/${google_storage_bucket.audit_logs[each.key].name}"
# Dedicated per-sink service account; granted objectCreator on the bucket by
# google_storage_bucket_iam_binding.audit_log_writer.
unique_writer_identity = true
}
# Per-project audit-log bucket, hosted centrally in the audit project.
# Objects start MULTI_REGIONAL, move to NEARLINE after 30 days and to
# COLDLINE after 180 days.
resource "google_storage_bucket" "audit_logs" {
  for_each = { for v in local.all_projects : v => v }

  name          = "${google_project.hipaa_project[each.key].project_id}-audit-logs"
  project       = google_project.hipaa_project["audit"].project_id
  location      = "US"
  storage_class = "MULTI_REGIONAL"

  lifecycle_rule {
    condition {
      age = 30
    }
    action {
      type          = "SetStorageClass"
      storage_class = "NEARLINE"
    }
  }

  lifecycle_rule {
    condition {
      age = 180
    }
    action {
      type          = "SetStorageClass"
      storage_class = "COLDLINE"
    }
  }
}
# Log-based metric counting data access on each audit-log bucket by any
# principal other than the auditor and owners groups.
resource "google_logging_metric" "unexpected_audit_bucket_access" {
for_each = { for v in local.all_projects : v => v }
project = google_project.hipaa_project["audit"].project_id
name = "bucket-unexpected-access-${google_storage_bucket.audit_logs[each.key].name}"
# NOTE(review): the logName renders as "projects/<id>%2F/data_access";
# GCS data-access audit logs are normally named
# "projects/<id>/logs/cloudaudit.googleapis.com%2Fdata_access" — verify this
# filter actually matches any log entries.
filter = "resource.type=gcs_bucket AND logName=projects/${google_project.hipaa_project["audit"].project_id}%2F/data_access AND protoPayload.resourceName=projects/_/buckets/${google_storage_bucket.audit_logs[each.key].name} AND protoPayload.authenticationInfo.principalEmail!=(${local.auditor_group_email} AND ${local.owners_group_email})"
description = "Count of unexpected data access on ${google_storage_bucket.audit_logs[each.key].name}"
metric_descriptor {
metric_kind = "DELTA"
value_type = "INT64"
}
}
# # # Because our sink uses a unique_writer, we must grant that writer access to the bucket.
resource "google_storage_bucket_iam_binding" "audit_log_writer" {
for_each = { for v in local.all_projects : v => v }
bucket = google_storage_bucket.audit_logs[each.key].name
# objectCreator: the sink may append log objects but not read or overwrite.
role = "roles/storage.objectCreator"
members = [
google_logging_project_sink.audit_logs_gcs[each.key].writer_identity,
]
}
# Grant auditor access to read from audit log bucket
resource "google_storage_bucket_iam_binding" "audit_log_reader" {
for_each = { for v in local.all_projects : v => v }
bucket = google_storage_bucket.audit_logs[each.key].name
# objectViewer: auditors can read exported logs but not modify them.
role = "roles/storage.objectViewer"
members = [
"group:${local.auditor_group_email}",
]
}
\ No newline at end of file
# Resolve attributes (e.g. display_name) of the folder identified by
# var.folder_id.
data "google_folder" "project_folder" {
folder = var.folder_id
}
\ No newline at end of file
# Create GCS Bucket in Data or Worker projects
resource "google_storage_bucket" "hipaa_data" {
count = contains(var.project_types_storing_data, var.project_type) == true ? 1 : 0
name = "${google_project.hipaa_project.project_id}-data"
project = google_project.hipaa_project.project_id
for_each = { for v in local.projects_storing_data : v => v }
name = "${google_project.hipaa_project[each.key].project_id}-data"
project = google_project.hipaa_project[each.key].project_id
versioning {
enabled = true
}
......@@ -11,32 +11,40 @@ resource "google_storage_bucket" "hipaa_data" {
bucket_policy_only = true
}
# Create Bucket IAAM Policy in Data project: writer group may add objects to
# the data bucket.
# This span contained interleaved pre-/post-commit diff lines (both the old
# count-based and new for_each-based definitions); only the post-commit
# ("data"-keyed) version is kept.
resource "google_storage_bucket_iam_binding" "data_project_bucket_writer_iam_binding" {
  bucket = google_storage_bucket.hipaa_data["data"].name
  role   = "roles/storage.legacyBucketWriter"
  members = [
    "group:${local.writer_group_email}"
  ]
}
# Reader group gets read-only access to the data project's bucket.
# This span contained interleaved pre-/post-commit diff lines; only the
# post-commit ("data"-keyed, locals-based) version is kept.
resource "google_storage_bucket_iam_binding" "data_project_bucket_reader_iam_binding" {
  bucket = google_storage_bucket.hipaa_data["data"].name
  role   = "roles/storage.legacyBucketReader"
  members = [
    "group:${local.read-only_group_email}"
  ]
}
# Create Bucket IAM Policy in Worker projects
# Each worker project's data bucket is writable by that worker's own group
# (looked up per project key in local.worker_project_email).
resource "google_storage_bucket_iam_binding" "worker_project_bucket_writer_iam_binding" {
for_each = { for v in local.worker_projects : v => v }
bucket = google_storage_bucket.hipaa_data[each.key].name
role = "roles/storage.legacyBucketWriter"
members = [
"group:${local.worker_project_email[each.key]}"
]
}
# Metric for unexpected bucket access in Data or Worker projects
resource "google_logging_metric" "unexpected_bucket_access" {
count = contains(var.project_types_storing_data, var.project_type) == true ? 1 : 0
project = google_project.hipaa_project.project_id
for_each = { for v in local.projects_storing_data : v => v }
project = google_project.hipaa_project[each.key].project_id
name = "bucket-unexpected-access"
filter = "resource.type=gcs_bucket AND logName=projects/${google_project.hipaa_project.project_id}%2F/data_access AND protoPayload.resourceName=projects/_/buckets/${google_storage_bucket.hipaa_data[0].name} AND protoPayload.authenticationInfo.principalEmail!=(${var.writer_group_email}${var.reader_group_email != "" ? "AND ${var.reader_group_email}" : ""})"
description = "Count of unexpected data access on ${google_storage_bucket.hipaa_data[0].name}"
filter = "resource.type=gcs_bucket AND logName=projects/${google_project.hipaa_project[each.key].project_id}%2F/data_access AND protoPayload.resourceName=projects/_/buckets/${google_storage_bucket.hipaa_data[each.key].name} AND protoPayload.authenticationInfo.principalEmail!=(${local.writer_group_email} AND ${local.read-only_group_email}})"
description = "Count of unexpected data access on ${google_storage_bucket.hipaa_data[each.key].name}"
metric_descriptor {
metric_kind = "DELTA"
value_type = "INT64"
......@@ -45,20 +53,19 @@ resource "google_logging_metric" "unexpected_bucket_access" {
# Create BigQuery Dataset in Data or Worker projects
resource "google_bigquery_dataset" "hipaa_data_bq" {
count = contains(var.project_types_storing_data, var.project_type) == true ? 1 : 0
dataset_id = "${replace(google_project.hipaa_project.name, "-", "_")}_data_bq"
project = google_project.hipaa_project.project_id
# friendly_name = "${var.projectPrefix}-Data"
description = "${google_project.hipaa_project.name} - BQ Dataset"
for_each = { for v in local.projects_storing_data : v => v }
dataset_id = "${replace(google_project.hipaa_project[each.key].name, "-", "_")}_data_bq"
project = google_project.hipaa_project[each.key].project_id
description = "${google_project.hipaa_project[each.key].name} - BQ Dataset"
location = "US"
access {
role = "OWNER"
group_by_email = var.owners_group_email
group_by_email = local.owners_group_email
}
dynamic "access" {
for_each = var.reader_group_email == "" ? [] : [var.reader_group_email]
for_each = each.key == "data" ? [local.read-only_group_email] : []
content {
role = "organizations/715302536254/roles/BigQueryDataViewer_NO_Export"
group_by_email = access.value
......@@ -66,23 +73,23 @@ resource "google_bigquery_dataset" "hipaa_data_bq" {
}
dynamic "access" {
for_each = var.datalab_service_account_readers
for_each = each.key == "data" ? [local.writer_group_email] : []
content {
role = "organizations/715302536254/roles/BigQueryDataViewer_NO_Export"
user_by_email = access.value
role = "organizations/715302536254/roles/BigQueryDataEditor_NO_Export"
group_by_email = access.value
}
}
dynamic "access" {
for_each = var.project_type != "worker" ? [var.writer_group_email] : []
for_each = each.key == "data" ? local.datalab_service_account_readers : []
content {
role = "organizations/715302536254/roles/BigQueryDataEditor_NO_Export"
group_by_email = access.value
role = "organizations/715302536254/roles/BigQueryDataViewer_NO_Export"
user_by_email = access.value
}
}
dynamic "access" {
for_each = var.project_type == "worker" ? [var.writer_group_email] : []
for_each = each.key != "data" ? [local.worker_project_email[each.key]] : []
content {
role = "organizations/715302536254/roles/BigQueryDataViewer_NO_Export"
group_by_email = access.value
......@@ -90,7 +97,7 @@ resource "google_bigquery_dataset" "hipaa_data_bq" {
}
dynamic "access" {
for_each = [for v in var.datalab_user_list : google_service_account.datalab_service_account[v].email]
for_each = [for v in var.datalab_user_list : v.value == "each.key" ? google_service_account.datalab_service_account[v.key].email : []]
content {
role = "organizations/715302536254/roles/BigQueryDataEditor_NO_Export"
user_by_email = access.value
......
# Allow the datalab reader service accounts to run BigQuery jobs in the data
# project.
# NOTE(review): *_iam_binding is authoritative for this role — any jobUser
# members granted outside Terraform will be removed; confirm this is intended.
resource "google_project_iam_binding" "datalab_service_account_iam_binding" {
project = google_project.hipaa_project["data"].project_id
role = "roles/bigquery.jobUser"
members = [ for v in local.datalab_service_account_readers : "serviceAccount:${v}" ]
}
# Enable the Source Repositories API in every worker project (required by the
# datalab-notebooks repo).
# This span contained interleaved pre-/post-commit diff lines (old count-based
# and new for_each-based attributes); only the post-commit version is kept.
resource "google_project_service" "sourcerepo-api" {
  for_each = { for v in local.worker_projects : v => v }

  project = google_project.hipaa_project[each.key].project_id
  service = "sourcerepo.googleapis.com"
}
# One "datalab-notebooks" source repository per worker project, used by the
# Datalab instances created below.
# This span contained interleaved pre-/post-commit diff lines; only the
# post-commit for_each-based version is kept.
resource "google_sourcerepo_repository" "datalab-notebooks" {
  for_each = { for v in local.worker_projects : v => v }

  name       = "datalab-notebooks"
  project    = google_project.hipaa_project[each.key].project_id
  depends_on = [ google_project_service.sourcerepo-api ]
}
# Dedicated auto-mode VPC for Datalab instances in each worker project.
# This span contained interleaved pre-/post-commit diff lines; only the
# post-commit for_each-based version is kept.
resource "google_compute_network" "datalab-network" {
  for_each = { for v in local.worker_projects : v => v }

  name                    = "datalab-network"
  auto_create_subnetworks = true
  project                 = google_project.hipaa_project[each.key].project_id
  description             = "Network for Google Cloud Datalab instances"
}
resource "google_compute_firewall" "datalab-network-allow-ssh" {
count = var.project_type == "worker" ? 1 : 0
for_each = { for v in local.worker_projects : v => v }
name = "datalab-network-allow-ssh"
description = "Allow SSH access to Datalab instances"
network = google_compute_network.datalab-network[0].name
project = google_project.hipaa_project.project_id
network = google_compute_network.datalab-network[each.key].name
project = google_project.hipaa_project[each.key].project_id
priority = 1000
direction = "INGRESS"
......@@ -35,11 +41,16 @@ resource "google_compute_firewall" "datalab-network-allow-ssh" {
protocol = "tcp"
ports = [ "22" ]
}
}
# One service account per Datalab user. Keys of var.datalab_user_list are
# user emails; values index into google_project.hipaa_project — presumably
# the worker-project key each user belongs to (verify against callers).
resource "google_service_account" "datalab_service_account" {
for_each = var.datalab_user_list
# NOTE(review): account_id must be 6-30 lowercase letters/digits/hyphens;
# long or mixed-case email local parts could violate that — confirm.
account_id = "datalab-${split("@", each.key)[0]}"
project = google_project.hipaa_project[each.value].project_id
}
resource "google_service_account_iam_binding" "datalab_service_account_iam_binding" {
for_each = { for v in var.datalab_user_list : v => v }
for_each = var.datalab_user_list
service_account_id = google_service_account.datalab_service_account[each.key].name
role = "roles/iam.serviceAccountUser"
members = [
......@@ -48,7 +59,7 @@ resource "google_service_account_iam_binding" "datalab_service_account_iam_bindi
}
data "google_iam_policy" "datalab_user_to_instance_policy" {
for_each = { for v in var.datalab_user_list : v => v }
for_each = var.datalab_user_list
binding {
role = "roles/compute.instanceAdmin.v1"
......@@ -59,34 +70,30 @@ data "google_iam_policy" "datalab_user_to_instance_policy" {
}
# Attach the per-user IAM policy to that user's Datalab compute instance.
# This span contained interleaved pre-/post-commit diff lines; only the
# post-commit (map-valued datalab_user_list) version is kept.
resource "google_compute_instance_iam_policy" "datalab_user_to_instance_binding" {
  for_each = var.datalab_user_list

  # Instance name mirrors the one created by the datalab CLI in
  # null_resource.datalab.
  instance_name = "datalab-${split("@", each.key)[0]}"
  project       = google_project.hipaa_project[each.value].project_id
  zone          = "us-central1-a"
  # 0.12 idiom: direct reference instead of a redundant "${...}" wrapper.
  policy_data   = data.google_iam_policy.datalab_user_to_instance_policy[each.key].policy_data
  # The instance must exist before a policy can be attached to it.
  depends_on    = [ null_resource.datalab ]
}
# NOTE(review): this looks like the pre-rework (count-era) definition left
# behind by the diff — a for_each-based google_service_account with the same
# address "datalab_service_account" appears earlier, and duplicate resource
# addresses fail `terraform validate`. One of the two must be removed.
resource "google_service_account" "datalab_service_account" {
for_each = { for v in var.datalab_user_list : v => v }
account_id = "datalab-${split("@", each.key)[0]}"
project = google_project.hipaa_project.project_id
}
# Create/destroy one Datalab instance per user via the datalab CLI (there is
# no native Terraform resource for Datalab).
# This span contained interleaved pre-/post-commit diff lines; only the
# post-commit (each.value-indexed project) version is kept.
resource "null_resource" "datalab" {
  for_each = var.datalab_user_list

  # Recreate the instance if the user key changes.
  triggers = {
    user = each.key
  }

  provisioner "local-exec" {
    command = "datalab create --project ${google_project.hipaa_project[each.value].project_id} --machine-type n1-standard-1 --zone us-central1-a --no-connect --for-user ${each.key} --service-account ${google_service_account.datalab_service_account[each.key].email} datalab-${split("@", each.key)[0]}"
  }

  provisioner "local-exec" {
    when    = "destroy"
    command = "datalab delete --quiet --delete-disk --project ${google_project.hipaa_project[each.value].project_id} --zone us-central1-a datalab-${split("@", each.key)[0]}"
  }

  # The CLI needs the notebooks repo, the network, and an installed datalab
  # binary before it can create instances.
  depends_on = [ google_sourcerepo_repository.datalab-notebooks, google_compute_network.datalab-network, null_resource.install_datalab_cli ]
}
\ No newline at end of file
# Create folder under HIPAA folder in resource tree (Org policies at folder level)
# All projects built by this module are placed inside this folder.
resource "google_folder" "project_folder" {
display_name = var.project_prefix
parent = "folders/${var.parent_folder_id}"
}
# Per-project 2-byte (4 hex chars) random suffix, appended to project_id to
# keep it globally unique.
resource "random_id" "id" {
for_each = { for v in local.all_projects : v => v }
byte_length = 2
}
# One GCP project per entry in local.all_projects, created under the module's
# folder, with a random hex suffix for global project_id uniqueness.
# This span contained interleaved pre-/post-commit diff lines (old
# var.project_type/worker_id naming plus the new for_each body); only the
# post-commit version is kept.
resource "google_project" "hipaa_project" {
  for_each = { for v in local.all_projects : v => v }

  name            = "${var.project_prefix}-${each.key}"
  project_id      = "${var.project_prefix}-${each.key}-${random_id.id[each.key].hex}"
  folder_id       = google_folder.project_folder.id
  billing_account = var.billing_account_id

  # Do not create the default auto-mode network.
  auto_create_network = false
}
# Surface caller-supplied dependency strings (var.dependencies) as a resource
# that other resources can depend_on, to force ordering on things Terraform
# cannot see directly.
resource "null_resource" "external_dependencies" {
triggers = {
dependencies = join(",", var.dependencies)
}
}
# Enable security reviewer permission for audit project
# This span contained interleaved pre-/post-commit diff lines (old count-based
# attributes alongside the new "audit"-keyed ones); only the post-commit
# version is kept.
resource "google_project_iam_binding" "auditor_iam" {
  project = google_project.hipaa_project["audit"].project_id
  role    = "roles/iam.securityReviewer"
  members = [
    "group:${local.auditor_group_email}",
  ]
}
resource "google_project_iam_binding" "datalab_service_account_iam_binding" {
count = var.project_type == "data" ? 1 : 0
project = google_project.hipaa_project.project_id
role = "roles/bigquery.jobUser"
members = [ for v in var.datalab_service_account_readers : "serviceAccount:${v}" ]
}
# Enable BQ APIs
# This span contained interleaved pre-/post-commit diff lines; only the
# post-commit for_each-based version is kept.
resource "google_project_service" "bq-api" {
  for_each = { for v in local.bq_enabled_projects : v => v }

  project = google_project.hipaa_project[each.key].project_id
  # NOTE(review): "bigquery-json.googleapis.com" is the legacy service name,
  # superseded by "bigquery.googleapis.com" — verify before upgrading the
  # provider.
  service = "bigquery-json.googleapis.com"
}
# Prevent Project Deletion for Auditor and Data projects
# A lien blocks `resourcemanager.projects.delete` on each project in
# local.projects_with_lien.
# This span contained interleaved pre-/post-commit diff lines; only the
# post-commit for_each-based version is kept.
resource "google_resource_manager_lien" "project_lien" {
  for_each = { for v in local.projects_with_lien : v => v }

  parent       = "projects/${google_project.hipaa_project[each.key].project_id}"
  restrictions = ["resourcemanager.projects.delete"]
  origin       = "Per HIPAA recommendation/requirements; Prevent deletion of ${google_project.hipaa_project[each.key].project_id}"
  reason       = "${google_project.hipaa_project[each.key].project_id} is part of a HIPAA research project in folder - ${google_folder.project_folder.display_name}"
}
# # # # Metric for BQ settings change
# resource "google_logging_metric" "bq_settings_change" {
# project = google_project.hipaa_project.project_id
# name = "bigquery-settings-change-count"
# filter = "resource.type=bigquery_resource AND protoPayload.methodName=datasetservice.update"
# description = "Count of BigQuery permission changes"
# metric_descriptor {
# metric_kind = "DELTA"
# value_type = "INT64"
# }
# }
# Metric for Set IAM Permissions
# Counts SetIam* method calls (project- and resource-level permission changes)
# in every project.
# This span contained both the old single-project body and the new
# for_each-based body back-to-back (diff residue); only the post-commit
# version is kept.
resource "google_logging_metric" "set_iam_permissions_change" {
  for_each = { for v in local.all_projects : v => v }

  project = google_project.hipaa_project[each.key].project_id
  name    = "Set-IAM-Permissions-Change"
  # filter = "resource.type=gcs_bucket AND protoPayload.methodName=storage.googleapis.com AND (protoPayload.methodName=storage.setIamPermissions OR protoPayload.methodName=storage.objects.update)"
  filter      = "protoPayload.methodName: SetIam"
  description = "Count of Set IAM Permission (on project and resources)"

  metric_descriptor {
    metric_kind = "DELTA"
    value_type  = "INT64"
  }
}
# # # # Metric for bucket permission change
# resource "google_logging_metric" "bucket_permission_change" {
# project = google_project.hipaa_project.project_id
# name = "bucket-permission-change"
# filter = "resource.type=gcs_bucket AND protoPayload.methodName=storage.googleapis.com AND (protoPayload.methodName=storage.setIamPermissions OR protoPayload.methodName=storage.objects.update)"
# description = "Count of GCS permission changes"
# metric_descriptor {
# metric_kind = "DELTA"
# value_type = "INT64"
# }
# }
# # # # Metric for project IAM change
# resource "google_logging_metric" "iam_policy_change" {
# project = google_project.hipaa_project.project_id