Note: The default ITS GitLab runner is a shared resource and is subject to slowdowns during heavy usage.
You can run your own GitLab runner that is dedicated just to your group if you need to avoid processing delays.

Commit 57cd992a authored by Kenny Moore's avatar Kenny Moore
Browse files

Modular_Audit_fixes


Co-authored-by: Robinson, Adam <adarobin@umich.edu>
parent c7429583
......@@ -51,7 +51,12 @@ module "audit" {
database_function_url = local.database_function_url[var.environment]
shortcode = var.shortcode
environment = var.environment
audit_logs_access = var.audit_logs_access
audit_logs_access = var.audit_logs_access
big_query = true
big_query_retention = 14
gcs = true
gcs_storage_class = "COLDLINE"
gcs_expiration_days = 1100
}
resource "google_folder" "customer_folder" {
......
......@@ -3,7 +3,7 @@ locals {
project_id_prefix = "${var.division}-audit-"
project_id_customer = substr(local.short_mcomm,0,30-5-length(local.project_id_prefix))
project_id = "${local.project_id_prefix}${local.project_id_customer}-${random_id.id.hex}"
project_id = "${local.project_id_prefix}${local.project_id_customer}-${random_id.id[0].hex}"
project_name = substr("${var.division} Audit ${local.short_mcomm}",0,30)
......@@ -60,15 +60,22 @@ locals {
campus = "pubsub.googleapis.com/projects/gcp-at-um-mon/topics/gcp-at-um-logs-prod"
michigan_medicine = "pubsub.googleapis.com/projects/gcp-at-um-mon/topics/gcp-at-um-logs-prod"
hipaa = "pubsub.googleapis.com/projects/gcp-at-um-mon/topics/hipaa-logs-prod" # "pubsub.googleapis.com/projects/${var.division}-logs-${var.environment}"
}
}
}
big_query_enabled = var.big_query ? 1 : 0
gcs_enabled = var.gcs ? 1 : 0
big_query_or_gcs_enabled = var.gcs || var.big_query ? 1 : 0
gcs_expiration_seconds = (var.gcs_expiration_days - 1) * 24 * 60 * 60
}
resource "google_project" "gcp_project" {
name = local.project_name
project_id = local.project_id
folder_id = var.folder_id
billing_account = var.billing_id
count = local.big_query_or_gcs_enabled
name = local.project_name
project_id = local.project_id
folder_id = var.folder_id
billing_account = var.billing_id
auto_create_network = false
labels = {
"shortcode" = var.shortcode
......@@ -76,9 +83,10 @@ resource "google_project" "gcp_project" {
}
module "log_export_bq" {
count = local.big_query_enabled
source = "terraform-google-modules/log-export/google"
version = "5.1.0"
destination_uri = module.destination_bq.destination_uri # "${module.destination_bq.destination_uri}"
destination_uri = module.destination_bq[0].destination_uri # "${module.destination_bq.destination_uri}"
filter = local.log_filters
log_sink_name = "${var.division}-audit-bq"
parent_resource_id = var.folder_id
......@@ -88,18 +96,20 @@ module "log_export_bq" {
}
# BigQuery destination dataset for the folder-level audit log sink.
# Created only when BigQuery export is enabled (var.big_query = true);
# all cross-references therefore use the [0] count index.
module "destination_bq" {
  count      = local.big_query_enabled
  source     = "terraform-google-modules/log-export/google//modules/bigquery"
  project_id = google_project.gcp_project[0].project_id
  # BigQuery dataset names may not contain dashes, so convert them to underscores.
  dataset_name             = replace("${var.division}-audit-${local.short_mcomm}-${random_id.id[0].hex}", "-", "_")
  description              = "Aggregated Log Sink (folder) - HIPAA Customer Logs"
  log_sink_writer_identity = module.log_export_bq[0].writer_identity
  location                 = "US"
  # Per-table retention in days, customer-configurable (was hard-coded to 14).
  expiration_days = var.big_query_retention
}
resource "google_bigquery_dataset_iam_binding" "bq_user" {
project = google_project.gcp_project.project_id
dataset_id = module.destination_bq.resource_name
count = local.big_query_enabled
project = google_project.gcp_project[0].project_id
dataset_id = module.destination_bq[0].resource_name
role = "roles/bigquery.user"
members = [
# "user:kenmoore@umich.edu",
......@@ -135,8 +145,9 @@ resource "null_resource" "customer_database" {
# Grants the customer's MCommunity group read-only access to the audit project.
# Created only when at least one log destination (BigQuery or GCS) is enabled.
resource "google_project_iam_member" "project_iam" {
  count   = local.big_query_or_gcs_enabled
  project = google_project.gcp_project[0].project_id
  # Viewer (not editor): customers should read audit logs, not modify the project.
  role   = "roles/viewer"
  member = "group:${var.mcomm_group_email}"
}
......@@ -161,40 +172,44 @@ resource "google_project_iam_member" "project_iam" {
# working on lien on audit project
# Lien preventing accidental deletion of the audit project, per HIPAA
# recommendation/requirements. Created only when the project itself exists
# (at least one of BigQuery/GCS export enabled).
resource "google_resource_manager_lien" "project_lien" {
  count        = local.big_query_or_gcs_enabled
  parent       = "projects/${google_project.gcp_project[0].project_id}"
  restrictions = ["resourcemanager.projects.delete"]
  origin       = "Per HIPAA recommendation/requirements; Prevent deletion of ${google_project.gcp_project[0].project_id}"
  reason       = "${google_project.gcp_project[0].project_id} holds the audit logs for HIPAA customer folder id ${var.folder_id}"
}
# Random 2-byte suffix used to make the project id, BQ dataset name, and GCS
# bucket name globally unique. Only created when at least one log destination
# (BigQuery or GCS) is enabled, so references elsewhere use random_id.id[0].
resource "random_id" "id" {
count = local.big_query_or_gcs_enabled
byte_length = 2
}
# Folder-level aggregated log sink that routes HIPAA customer logs to the GCS
# bucket in the audit project. Created only when GCS export is enabled.
resource "google_logging_folder_sink" "hipaa_customer_logs" {
  count       = local.gcs_enabled
  name        = "${var.division}-audit-${local.short_mcomm}"
  description = "Aggregated Log Sink - HIPAA Customer Logs"
  folder      = var.folder_id
  # include logs from all projects in folder
  include_children = true
  # send to GCS bucket in audit project
  destination = "storage.googleapis.com/${google_storage_bucket.hipaa_customer_logs[0].name}"
  filter      = local.log_filters
}
resource "google_storage_bucket" "hipaa_customer_logs" {
project = google_project.gcp_project.project_id
name = "${var.division}-audit-${local.short_mcomm}-${random_id.id.hex}"
resource "google_storage_bucket" "hipaa_customer_logs" {
count = local.gcs_enabled
project = google_project.gcp_project[0].project_id
name = "${var.division}-audit-${local.short_mcomm}-${random_id.id[0].hex}"
location = "US-CENTRAL1"
force_destroy = false
uniform_bucket_level_access = true
storage_class = "COLDLINE"
storage_class = var.gcs_storage_class
retention_policy {
retention_period = "95000000" # just over 3 years
retention_period = local.gcs_expiration_seconds # just over 3 years ## <-- needs to be a local
}
lifecycle_rule {
......@@ -207,43 +222,49 @@ resource "google_storage_bucket" "hipaa_customer_logs" {
}
}
lifecycle_rule {
action {
type = "Delete"
}
condition {
age = 1100 # just over 3 x 365 days = 3 years (slightly more than the retention policy)
dynamic "lifecycle_rule" {
for_each = var.gcs_expiration_days == null ? [] : [ var.gcs_expiration_days ]
content {
action {
type = "Delete"
}
condition {
age = var.gcs_expiration_days # just over 3 x 365 days = 3 years (slightly more than the retention policy)
}
}
}
}
# Allows the folder sink's writer identity to create objects in the audit
# project's log bucket. Created only when GCS export is enabled.
# NOTE(review): this is an authoritative binding for roles/storage.objectCreator
# on the project — it will remove any other members holding that role; confirm
# that is intended vs. google_project_iam_member.
resource "google_project_iam_binding" "hipaa_customer_log_writer" {
  count   = local.gcs_enabled
  project = google_project.gcp_project[0].project_id
  role    = "roles/storage.objectCreator"
  members = [
    google_logging_folder_sink.hipaa_customer_logs[0].writer_identity,
  ]
}
resource "google_logging_project_sink" "log_export" {
project = google_project.gcp_project.project_id
# name = "${google_project.gcp_project.project_id}-log-export"
name = "log-export-splunk"
destination = local.log_export_destination[var.environment][var.division]
filter = local.filter_string # if adding filter to match nothing; use in conjunction with aggregated log sink + exclusion filter
exclusions {
name = "Aggregated_Logs_Exclusion"
description = "Excluding logs already captured by aggregated log sink (at folder). Please DO NOT alter/remove."
filter = local.log_filters
}
unique_writer_identity = true
}
# resource "google_logging_project_sink" "log_export" {
# count = local.big_query_or_gcs_enabled
# project = google_project.gcp_project.project_id
# # name = "${google_project.gcp_project.project_id}-log-export"
# name = "log-export-splunk"
# destination = local.log_export_destination[var.environment][var.division]
# filter = local.filter_string # if adding filter to match nothing; use in conjunction with aggregated log sink + exclusion filter
# exclusions {
# name = "Aggregated_Logs_Exclusion"
# description = "Excluding logs already captured by aggregated log sink (at folder). Please DO NOT alter/remove."
# filter = local.log_filters
# }
# unique_writer_identity = true
# }
resource "google_pubsub_topic_iam_member" "publisher" {
project = local.log_export_project
topic = local.log_export_destination[var.environment][var.division]
role = "roles/pubsub.publisher"
member = google_logging_project_sink.log_export.writer_identity
}
# resource "google_pubsub_topic_iam_member" "publisher" {
# count = local.big_query_or_gcs_enabled
# project = local.log_export_project
# topic = local.log_export_destination[var.environment][var.division]
# role = "roles/pubsub.publisher"
# member = google_logging_project_sink.log_export.writer_identity
# }
......@@ -47,7 +47,25 @@ variable "audit_logs_access" {
description = "MCommunity group that will be given permission to GCS bucket and/or BQ dataset"
}
# variable "big_query" {
# type = bool
# }
\ No newline at end of file
# Enables the BigQuery log destination (log_export_bq / destination_bq).
variable "big_query" {
  description = "Whether to export audit logs to a BigQuery dataset"
  type        = bool
}

# Consumed by module.destination_bq as expiration_days.
variable "big_query_retention" {
  description = "Number of days to retain BigQuery data in table"
  type        = number
}

# Enables the GCS log destination (folder sink + bucket).
variable "gcs" {
  description = "Whether to export audit logs to a GCS bucket"
  type        = bool
}

variable "gcs_storage_class" {
  description = "Storage class of the audit log GCS bucket (e.g. COLDLINE)"
  type        = string
}

# Drives both the bucket retention policy (days - 1, in seconds) and the
# delete lifecycle rule; a null value disables the lifecycle rule.
variable "gcs_expiration_days" {
  description = "Number of days after which audit log objects in GCS are deleted"
  type        = number
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment