Commit 042701ae authored by Kenny Moore

Cleaning up to make public

parent de89b14b
resource "google_bigquery_dataset" "audit_logs" {
for_each = { for v in local.all_projects : v => v }
dataset_id = "${replace(google_project.hipaa_project[each.key].name, "-", "_")}_audit_logs"
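# BigQuery dataset IDs may only contain letters, numbers, and underscores, so dashes in the project name are replaced.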
project = google_project.hipaa_project["audit"].project_id
# friendly_name = "${var.projectPrefix}-Data"
description = "${google_project.hipaa_project[each.key].project_id} - Log Export BQ Dataset"
location = "US"
default_table_expiration_ms = 10368000000 # 120 days (120 * 24 * 60 * 60 * 1000 ms)
@@ -15,13 +14,7 @@ resource "google_bigquery_dataset" "audit_logs" {
access {
role = "READER"
group_by_email = local.auditor_group_email
}
# This doesn't work as it creates a circular dependency
# access{
# role = "WRITER"
# user_by_email = split(":", google_logging_project_sink.hipaa_audit_project_logs_bq_sink.writer_identity)[1]
# }
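# Writer access for the sink's service account is granted instead through the
# separate google_bigquery_dataset_access resource below, which avoids the cycle
# between this dataset and the log sink.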
}
lifecycle {
//Since these are audit logs, they should not be destroyed
@@ -46,26 +39,12 @@ resource "google_logging_project_sink" "audit_logs_bq" {
resource "google_bigquery_dataset_access" "sink_writer_bq_access" {
for_each = { for v in local.all_projects : v => v }
project = google_project.hipaa_project["audit"].project_id
# dataset_id = "${google_project.hipaa_project["audit"].project_id}/datasets/${google_bigquery_dataset.audit_logs[each.key].dataset_id}"
dataset_id = google_bigquery_dataset.audit_logs[each.key].dataset_id
role = "WRITER"
user_by_email = split(":", google_logging_project_sink.audit_logs_bq[each.key].writer_identity)[1]
}
# resource "null_resource" "sink_writer_bq_access" {
# for_each = { for v in local.all_projects : v => v }
# triggers = {
# writer_identity = google_logging_project_sink.audit_logs_bq[each.key].writer_identity
# }
# provisioner "local-exec" {
# command = "${path.module}/set-sink-writer-bq-access.sh ${google_project.hipaa_project["audit"].project_id} ${google_bigquery_dataset.audit_logs[each.key].dataset_id} ${google_logging_project_sink.audit_logs_bq[each.key].writer_identity}"
# }
# depends_on = [ null_resource.install_gcloud_cli ]
# }
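# The local-exec/bq workaround above appears superseded by the native google_bigquery_dataset_access resource.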
resource "google_logging_project_sink" "audit_logs_gcs" {
for_each = { for v in local.all_projects : v => v }
name = "hipaa-audit-logs-to-gcs"
@@ -141,4 +120,4 @@ resource "google_storage_bucket_iam_binding" "audit_log_reader" {
members = [
"group:${local.auditor_group_email}",
]
}
@@ -180,4 +180,4 @@ resource "google_bigquery_dataset" "hipaa_data_bq" {
}
depends_on = [ google_project_service.bq-api ]
}
variable "target_folder" {
default = "folders/931994294999"
}
resource "google_folder_organization_policy" "resource_locations" {
# folder = "folders/931994294999"
# folder = var.target_folder
folder = google_folder.project_folder.id
constraint = "gcp.resourceLocations"
list_policy{
@@ -13,30 +8,3 @@ resource "google_folder_organization_policy" "resource_locations" {
}
}
}
# {
# "policies": [
# {
# "constraint": "constraints/compute.trustedImageProjects",
# "etag": "BwWjL9PEyhw=",
# "updateTime": "2020-04-13T17:58:15.104Z",
# "listPolicy": {
# "allowedValues": [
# "projects/gcp-at-u-m-image-repo"
# ],
# "inheritFromParent": true
# }
# },
# {
# "constraint": "constraints/gcp.resourceLocations",
# "etag": "BwWjPnW20Fs=",
# "updateTime": "2020-04-14T11:25:41.607Z",
# "listPolicy": {
# "allowedValues": [
# "in:us-locations"
# ],
# "inheritFromParent": true
# }
# }
# ]
# }
@@ -56,21 +56,3 @@ resource "google_logging_metric" "set_iam_permissions_change" {
value_type = "INT64"
}
}
# resource "null_resource" "install_gcloud_cli" {
# triggers = {
# run_time = timestamp()
# }
# provisioner "local-exec" {
# command = <<EOH
# if [ `uname` != "Darwin" ]; then
# echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | sudo tee -a /etc/apt/sources.list.d/google-cloud-sdk.list
# curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -
# sudo apt-get --assume-yes update && sudo apt-get --assume-yes install google-cloud-sdk google-cloud-sdk-datalab jq
# echo $GOOGLE_CLOUD_KEYFILE_JSON > ~/key.json
# gcloud auth activate-service-account --key-file ~/key.json
# rm ~/key.json
# fi
# EOH
# }
# }
@@ -29,4 +29,4 @@ resource "google_monitoring_alert_policy" "set_iam_change" {
}
}
notification_channels = [google_monitoring_notification_channel.email.name]
}
locals {
default_cidr = "10.255.0.0/16"
default_regions = ["us-central1", "us-east1", "us-east4", "us-west1"]
default_subnets = {for x in local.default_regions : x => cidrsubnet(local.default_cidr,4,index(local.default_regions, x))}
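# e.g. cidrsubnet("10.255.0.0/16", 4, 0) => "10.255.0.0/20"; indices 1-3 yield 10.255.16.0/20, 10.255.32.0/20, 10.255.48.0/20.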
# pods_cidr = "10.255.128.0/17"
# pods_range = {for x in local.default_regions : x => cidrsubnet(local.pods_cidr,3,index(local.default_regions, x))}
# services_cidr = "10.255.224.0/19"
# services_range = {for x in local.default_regions : x => cidrsubnet(local.services_cidr,3,index(local.default_regions, x))}
network_type = "worker"
}
@@ -27,26 +23,20 @@ resource "google_compute_shared_vpc_service_project" "hipaa-shared-vpc-worker" {
service_project = google_project.hipaa_project[each.key].project_id
}
## Thinking about the names for these subnets...
resource "google_compute_subnetwork" "worker_subnet" {
for_each = local.default_subnets
project = google_project.hipaa_project["network"].project_id
name = "${local.network_type}-${each.key}"
region = each.key
ip_cidr_range = each.value
network = google_compute_network.hipaa-shared-vpc.self_link
# secondary_ip_range = var.gke ? [{range_name = "${each.key}-pods", ip_cidr_range = local.pods_range[each.key]},{range_name = "${each.key}-services", ip_cidr_range = local.services_range[each.key]}] : []
}
resource "google_compute_subnetwork_iam_binding" "worker_subnet_binding" {
for_each = local.default_subnets
project = google_project.hipaa_project["network"].project_id
region = each.key
subnetwork = google_compute_subnetwork.worker_subnet[each.key].name
role = "roles/compute.networkUser"
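# Grants compute.networkUser on each worker subnet to the AI Platform Notebooks
# service agent of every worker project plus the all-workers group.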
members = concat(compact([ for v in local.worker_projects : "serviceAccount:service-${google_project.hipaa_project[v].number}@gcp-sa-notebooks.iam.gserviceaccount.com"]), ["group:${local.all_workers_group_email}"])
}
# project_number = "672858288516"
# ai_notebook_agent = "service-672858288516@gcp-sa-notebooks.iam.gserviceaccount.com"
@@ -37,8 +37,7 @@ resource "google_project_iam_binding" "datalab_service_account_iam_binding" {
for_each = { for v in local.worker_projects : v => v }
project = google_project.hipaa_project[each.key].project_id
role = "roles/bigquery.jobUser"
# members = compact([ for v in keys(var.datalab_user_list) : var.datalab_user_list[v] == each.value ? "serviceAccount:${google_service_account.datalab_service_account[v].email}" : "" ])
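# Grants bigquery.jobUser to the Datalab service accounts of the users assigned to this project.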
members = compact([ for v in var.datalab_user_list : v["project"] == each.value ? "serviceAccount:${google_service_account.datalab_service_account[v["username"]].email}" : "" ])
}
@@ -135,26 +134,6 @@ resource "google_compute_instance_iam_policy" "notebook_user_to_instance_binding
policy_data = data.google_iam_policy.notebook_user_to_instance_policy[each.key].policy_data
}
# # Create AI Platform Notebooks Service Agent (to replace default)
# resource "google_service_account" "ai_notebooks_agent" {
# for_each = { for v in local.worker_projects : v => v }
# project = google_project.hipaa_project[each.key].project_id
# account_id = "ai-notebooks-agent"
# display_name = "Cloud AI Platform Notebooks Service Account"
# }
# # roles/notebooks.serviceAgent
# resource "google_project_iam_binding" "ai_notebooks_agent" {
# for_each = { for v in local.worker_projects : v => v }
# project = google_project.hipaa_project[each.key].project_id
# role = "roles/notebooks.serviceAgent"
# members = [
# "serviceAccount:${google_service_account.ai_notebooks_agent[each.key].email}",
# ]
# }
resource "google_notebooks_instance" "worker_nb" {
provider = google-beta
for_each = { for v in var.datalab_user_list : v["username"] => v }
@@ -181,111 +160,3 @@ resource "google_notebooks_instance" "worker_nb" {
depends_on = [ google_project_service.notebooks_api, google_compute_subnetwork_iam_binding.worker_subnet_binding ]
}
# resource "google_compute_subnetwork" "worker_subnet" {
# creation_timestamp = "2020-05-21T05:40:54.303-07:00"
# gateway_address = "10.255.0.1"
# id = "projects/hipaa-test-network-706f/regions/us-central1/subnetworks/worker-us-central1"
# ip_cidr_range = "10.255.0.0/20"
# name = "worker-us-central1"
# network = "https://www.googleapis.com/compute/v1/projects/hipaa-test-network-706f/global/networks/hipaa-test-network"
# private_ip_google_access = false
# project = "hipaa-test-network-706f"
# region = "us-central1"
# secondary_ip_range = []
# self_link = "https://www.googleapis.com/compute/v1/projects/hipaa-test-network-706f/regions/us-central1/subnetworks/worker-us-central1"
# }
# resource "google_compute_instance" "ai_notebook" {
# for_each = { for v in var.datalab_user_list : v["username"] => v }
# project = google_project.hipaa_project[each.value["project"]].project_id
# # name = "notebook-${split("@", each.key)[0]}"
# name = "datalab-${split("@", each.key)[0]}"
# machine_type = each.value["machine_type"]
# # machine_type = "f1-micro"
# zone = var.datalab_zone
# tags = ["foo", "bar"] # https-server
# boot_disk {
# initialize_params {
# image = "deeplearning-platform-release/pytorch-latest-gpu" # var.image
# }
# }
# // Local SSD disk
# scratch_disk {
# interface = "SCSI"
# }
# network_interface {
# network = "datalab-network" # var.network (shared or default)
# access_config {
# // Ephemeral IP
# }
# }
# metadata = {
# proxy-mode = "project_editors" # service_account (perhaps? this is what I found in the UI)
# # # proxy-mode = "service_account" ("project_editors", "email")
# install-nvidia-driver = "true"
# framework = "PyTorch"
# shutdown-script = "timeout 30 gcloud compute instances remove-metadata gcloud-notebook-kenmoore-beta-sa --keys=proxy-url --zone var.datalab_zone"
# }
# # metadata_startup_script = "echo hi > /test.txt"
# metadata_startup_script = "sudo apt-get update; sudo apt-get install -y google-osconfig-agent" # This did not work :(
# service_account {
# # email = google_service_account.datalab_service_account[each.key].email
# # scopes = ["userinfo-email", "compute-ro", "storage-ro"]
# scopes = ["https://www.googleapis.com/auth/cloud-platform"]
# }
# scheduling {
# on_host_maintenance = "MIGRATE"
# }
# }
# # would like to adjust to allow networking project (shared vpc)
# module "datalab" {
# # for_each = var.datalab_user_list
# source = "terraform-google-modules/datalab/google//modules/default_instance"
# version = "~> 0.1"
# project_id = google_project.hipaa_project[each.value].project_id
# zone = var.datalab_zone
# # datalab_user_email = google_service_account.datalab_service_account[each.key].email # "google_service_account" "datalab_service_account"
# datalab_user_email =
# network_name = "datalab-network"
# subnet_name = "datalab-network"
# }
# resource "null_resource" "datalab" {
# for_each = var.datalab_user_list
# triggers = {
# user = each.key
# project_id = google_project.hipaa_project[each.value].project_id
# }
# provisioner "local-exec" {
# //command = "datalab create --project ${google_project.hipaa_project[each.value].project_id} --machine-type n1-standard-1 --zone us-central1-a --no-connect --for-user ${each.key} --service-account ${google_service_account.datalab_service_account[each.key].email} datalab-${split("@", each.key)[0]}"
# command = "datalab create --project ${self.triggers.project_id} --machine-type n1-standard-1 --zone ${var.datalab_zone} --no-connect --for-user ${self.triggers.user} --service-account ${google_service_account.datalab_service_account[self.triggers.user].email} datalab-${split("@", self.triggers.user)[0]}"
# }
# provisioner "local-exec" {
# when = destroy
# //command = "datalab delete --quiet --delete-disk --project ${google_project.hipaa_project[each.value].project_id} --zone us-central1-a datalab-${split("@", each.key)[0]}"
# command = "datalab delete --quiet --delete-disk --project ${self.triggers.project_id} --zone ${var.datalab_zone} datalab-${split("@", self.triggers.user)[0]}"
# }
# depends_on = [ google_sourcerepo_repository.datalab-notebooks, google_compute_network.datalab-network, null_resource.install_gcloud_cli ]
# }
#!/bin/bash
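# Grants a log sink's writer identity WRITER access on a BigQuery dataset.
# Usage: set-sink-writer-bq-access.sh <project id> <dataset id> <writer identity>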
set -e
if [[ -z "$1" || -z "$2" || -z "$3" ]]; then
echo "pass [project id] [dataset id] [writer identity]"
exit 1
fi
if ! [ -x "$(command -v bq)" ]; then
echo "bq cli (bigquery cli, from gcloud cli) is not available in path"
exit 1
fi
project=$1
dataset=$2
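# The writer identity arrives as "serviceAccount:<email>"; strip the prefix to keep just the email.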
writer=${3#"serviceAccount:"}
temp_file=$(mktemp)
trap "rm -f ${temp_file}" EXIT
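# Fetch the dataset's current metadata, append a WRITER entry for the sink's
# service account, and apply the updated access list with "bq update".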
bq show --format=prettyjson "${project}:${dataset}" \
| jq --arg writer "${writer}" '.access | . += [{"role": "WRITER", "userByEmail": $writer }] | {"access": .}' > "${temp_file}"
bq update --source "${temp_file}" "${project}:${dataset}"
@@ -42,10 +42,6 @@ data "google_compute_zones" "notebooks" {
project = google_project.hipaa_project["network"].project_id
region = var.notebook_region
}
# variable "datalab_zone" {
# description = "The zone to create datalab instances in"
# default = "us-central1-a"
# }
locals {
base_projects = ["audit", "data", "network", "monitor"]
......