# auditing.tf — exports each HIPAA project's cloud audit logs to a central
# audit project (BigQuery datasets + GCS buckets) and adds a log-based
# metric for unexpected access to the audit log buckets.
# One BigQuery dataset per monitored project, all hosted in the central
# audit project, receiving that project's cloud audit log export.
resource "google_bigquery_dataset" "audit_logs" {
    for_each = { for v in local.all_projects : v => v }

    # BigQuery dataset IDs may not contain dashes, so swap them for underscores.
    dataset_id  = "${replace(google_project.hipaa_project[each.key].name, "-", "_")}_audit_logs"
    project     = google_project.hipaa_project["audit"].project_id
    description = "${google_project.hipaa_project[each.key].project_id} - Log Export BQ Dataset"
    location    = "US"

    # 120 days = 120 * 24 * 60 * 60 * 1000 ms
    default_table_expiration_ms = 10368000000

    # The sink's writer identity is granted WRITER access out-of-band (see
    # null_resource.sink_writer_bq_access), so Terraform must not revert the
    # access list on later plans.
    lifecycle {
        # Terraform 0.12+ takes attribute references here, not strings;
        # the quoted form (["access"]) is deprecated.
        ignore_changes = [access]
    }

    access {
        role           = "OWNER"
        group_by_email = local.owners_group_email
    }

    access {
        role           = "READER"
        group_by_email = local.auditor_group_email
    }

    # A WRITER access block referencing the sink's writer_identity would
    # create a circular dependency (sink -> dataset -> sink), hence the
    # external grant script instead.
    depends_on = [google_project_service.bq-api]
}

# Per-project sink that routes cloud audit log entries into the project's
# dedicated BigQuery dataset in the central audit project.
resource "google_logging_project_sink" "audit_logs_bq" {
    for_each = { for v in local.all_projects : v => v }

    name    = "hipaa-audit-logs-to-bq"
    project = google_project.hipaa_project[each.key].project_id

    # Destination: the matching dataset created by google_bigquery_dataset.audit_logs.
    destination = "bigquery.googleapis.com/projects/${google_project.hipaa_project["audit"].project_id}/datasets/${google_bigquery_dataset.audit_logs[each.key].dataset_id}"

    # Only cloud audit log entries are exported.
    filter = "logName:logs/cloudaudit.googleapis.com"

    # A dedicated per-sink service account, so access can be granted narrowly.
    unique_writer_identity = true
}

# The dataset cannot grant WRITER to the sink's writer identity directly
# (circular dependency — see google_bigquery_dataset.audit_logs), so a local
# script applies the grant after both resources exist.
resource "null_resource" "sink_writer_bq_access" {
    for_each = { for v in local.all_projects : v => v }

    # Re-run the grant whenever the sink's writer identity changes.
    triggers = {
        writer_identity = google_logging_project_sink.audit_logs_bq[each.key].writer_identity
    }

    provisioner "local-exec" {
        # Each interpolated value is single-quoted so identifiers containing
        # shell metacharacters cannot split or inject into the command line.
        command = "'${path.module}/set-sink-writer-bq-access.sh' '${google_project.hipaa_project["audit"].project_id}' '${google_bigquery_dataset.audit_logs[each.key].dataset_id}' '${google_logging_project_sink.audit_logs_bq[each.key].writer_identity}'"
    }

    depends_on = [null_resource.install_gcloud_cli]
}

# Per-project sink that also routes cloud audit log entries to a GCS bucket
# in the central audit project (long-term archival copy).
resource "google_logging_project_sink" "audit_logs_gcs" {
    for_each = { for v in local.all_projects : v => v }

    name    = "hipaa-audit-logs-to-gcs"
    project = google_project.hipaa_project[each.key].project_id

    # Destination: the matching bucket created by google_storage_bucket.audit_logs.
    destination = "storage.googleapis.com/${google_storage_bucket.audit_logs[each.key].name}"

    # Only cloud audit log entries are exported.
    filter = "logName:logs/cloudaudit.googleapis.com"

    # A dedicated per-sink service account; the bucket IAM binding below
    # grants it objectCreator.
    unique_writer_identity = true
}

# # Create Hosting Project Audit Logs Bucket in Audit Project
# One audit-log bucket per monitored project, hosted in the audit project.
# Objects are tiered down to cheaper storage classes as they age.
resource "google_storage_bucket" "audit_logs" {
    for_each = { for v in local.all_projects : v => v }

    name          = "${google_project.hipaa_project[each.key].project_id}-audit-logs"
    project       = google_project.hipaa_project["audit"].project_id
    location      = "US"
    storage_class = "MULTI_REGIONAL"

    # After 30 days, move objects to NEARLINE.
    lifecycle_rule {
        action {
            type          = "SetStorageClass"
            storage_class = "NEARLINE"
        }
        condition {
            age = 30
        }
    }

    # After 180 days, move objects to COLDLINE.
    lifecycle_rule {
        action {
            type          = "SetStorageClass"
            storage_class = "COLDLINE"
        }
        condition {
            age = 180
        }
    }
}

# # # Metric for unexpected bucket access
resource "google_logging_metric" "unexpected_audit_bucket_access" {
    for_each = { for v in local.all_projects : v => v }
    project = google_project.hipaa_project["audit"].project_id
    name = "bucket-unexpected-access-${google_storage_bucket.audit_logs[each.key].name}"
    filter = "resource.type=gcs_bucket AND logName=projects/${google_project.hipaa_project["audit"].project_id}%2F/data_access AND protoPayload.resourceName=projects/_/buckets/${google_storage_bucket.audit_logs[each.key].name} AND protoPayload.authenticationInfo.principalEmail!=(${local.auditor_group_email} AND ${local.owners_group_email})" 
    description = "Count of unexpected data access on ${google_storage_bucket.audit_logs[each.key].name}"
    metric_descriptor {
        metric_kind = "DELTA"
        value_type = "INT64"
    }
}

# # # Because our sink uses a unique_writer, we must grant that writer access to the bucket.
# The GCS sink uses a unique writer identity, so that service account must
# be allowed to create objects in its bucket. NOTE: iam_binding is
# authoritative for this role — it replaces any other objectCreator members.
resource "google_storage_bucket_iam_binding" "audit_log_writer" {
    for_each = { for v in local.all_projects : v => v }

    bucket = google_storage_bucket.audit_logs[each.key].name
    role   = "roles/storage.objectCreator"
    members = [
        google_logging_project_sink.audit_logs_gcs[each.key].writer_identity,
    ]
}

# Grant auditor access to read from audit log bucket
# Auditors get read-only access to the exported audit log objects.
# NOTE: iam_binding is authoritative for this role — it replaces any other
# objectViewer members on the bucket.
resource "google_storage_bucket_iam_binding" "audit_log_reader" {
    for_each = { for v in local.all_projects : v => v }

    bucket = google_storage_bucket.audit_logs[each.key].name
    role   = "roles/storage.objectViewer"
    members = [
        "group:${local.auditor_group_email}",
    ]
}