diff --git a/build/int.cloudbuild.yaml b/build/int.cloudbuild.yaml
index d9e8575c..e9375b8a 100644
--- a/build/int.cloudbuild.yaml
+++ b/build/int.cloudbuild.yaml
@@ -76,7 +76,7 @@ steps:
     args: ['/bin/bash', '-c', 'cft test run TestLogBucketProjectModule --stage apply --verbose']
   - id: go-verify-logbucket-project
     waitFor:
-      - go-apply-logbucket-org
+      - go-apply-logbucket-project
     name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS'
     args: ['/bin/bash', '-c', 'cft test run TestLogBucketProjectModule --stage verify --verbose']
   - id: go-teardown-logbucket-project
diff --git a/examples/logbucket/project/README.md b/examples/logbucket/project/README.md
index e6b569df..7e5afec9 100644
--- a/examples/logbucket/project/README.md
+++ b/examples/logbucket/project/README.md
@@ -1,6 +1,6 @@
 # Log Export: Log Bucket destination at Project level
 
-These examples configures a project-level log sink that feeds a logging log bucket destination with log bucket and log sink in the same project or in separated projects.
+These examples configure a project-level log sink that feeds a Cloud Logging log bucket destination, with the log bucket and log sink either in the same project or in separate projects. These examples also configure [Log Analytics](https://cloud.google.com/logging/docs/log-analytics) on the log bucket with a linked BigQuery dataset.
 
 ## Inputs
 
@@ -14,8 +14,10 @@ These examples configures a project-level log sink that feeds a logging log buck
 
 | Name | Description |
 |------|-------------|
+| log\_bkt\_linked\_ds\_name\_same\_proj | The name of the linked BigQuery dataset for the log bucket in same project example. |
 | log\_bkt\_name\_same\_proj | The name for the log bucket for sink and logbucket in same project example. |
 | log\_bkt\_same\_proj | The project where the log bucket is created for sink and logbucket in same project example. |
+| log\_bucket\_linked\_ds\_name | The name of the linked BigQuery dataset for the log bucket. |
 | log\_bucket\_name | The name for the log bucket. |
 | log\_bucket\_project | The project where the log bucket is created. |
 | log\_sink\_dest\_uri\_same\_proj | A fully qualified URI for the log sink for sink and logbucket in same project example. |
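For orientation, the pattern these examples exercise boils down to a single module call. A minimal sketch, assuming the registry source path and placeholder project and resource names (the examples themselves use a local `../../..//modules/logbucket` source and generated names, as the next hunk shows):

```hcl
# Minimal sketch: "my-project" and the bucket/dataset names are placeholders.
module "destination" {
  source     = "terraform-google-modules/log-export/google//modules/logbucket"
  project_id = "my-project"
  name       = "my_logbucket"
  location   = "global"

  # New in this change: turn on Log Analytics and link a BigQuery dataset.
  # A bucket must have Log Analytics enabled before a dataset can be linked,
  # and the upgrade cannot be rolled back once applied.
  enable_analytics           = true
  linked_dataset_id          = "my_log_analytics_dataset"
  linked_dataset_description = "Linked dataset for my log bucket"

  log_sink_writer_identity = module.log_export.writer_identity
}
```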
diff --git a/examples/logbucket/project/main.tf b/examples/logbucket/project/main.tf
index e936df31..ddd6683e 100644
--- a/examples/logbucket/project/main.tf
+++ b/examples/logbucket/project/main.tf
@@ -31,11 +31,14 @@ module "log_export" {
 }
 
 module "destination" {
-  source                   = "../../..//modules/logbucket"
-  project_id               = var.project_destination_logbkt_id
-  name                     = "logbucket_from_other_project_${random_string.suffix.result}"
-  location                 = "global"
-  log_sink_writer_identity = module.log_export.writer_identity
+  source                     = "../../..//modules/logbucket"
+  project_id                 = var.project_destination_logbkt_id
+  name                       = "logbucket_from_other_project_${random_string.suffix.result}"
+  location                   = "global"
+  enable_analytics           = true
+  linked_dataset_id          = "log_analytics_dataset"
+  linked_dataset_description = "dataset for log bucket"
+  log_sink_writer_identity   = module.log_export.writer_identity
 }
 
 #-------------------------------------#
@@ -56,6 +59,9 @@ module "dest_same_proj" {
   project_id                    = var.project_destination_logbkt_id
   name                          = "logbucket_from_same_project_${random_string.suffix.result}"
   location                      = "global"
+  enable_analytics              = true
+  linked_dataset_id             = "log_analytics_dataset_same"
+  linked_dataset_description    = "dataset for log bucket in the same project"
   log_sink_writer_identity      = module.log_export_same_proj.writer_identity
   grant_write_permission_on_bkt = false
 }
diff --git a/examples/logbucket/project/outputs.tf b/examples/logbucket/project/outputs.tf
index 50b292be..34c5a117 100644
--- a/examples/logbucket/project/outputs.tf
+++ b/examples/logbucket/project/outputs.tf
@@ -24,6 +24,11 @@ output "log_bucket_name" {
   value       = module.destination.resource_name
 }
 
+output "log_bucket_linked_ds_name" {
+  description = "The name of the linked BigQuery dataset for the log bucket."
+  value       = module.destination.linked_dataset_name
+}
+
 output "log_sink_project_id" {
   description = "The project id where the log sink is created."
   value       = module.log_export.parent_resource_id
@@ -58,6 +63,11 @@ output "log_bkt_name_same_proj" {
   value       = module.dest_same_proj.resource_name
 }
 
+output "log_bkt_linked_ds_name_same_proj" {
+  description = "The name of the linked BigQuery dataset for the log bucket in same project example."
+  value       = module.dest_same_proj.linked_dataset_name
+}
+
 output "log_sink_id_same_proj" {
   description = "The project id where the log sink is created for sink and logbucket in same project example."
   value       = module.log_export_same_proj.parent_resource_id
diff --git a/modules/logbucket/README.md b/modules/logbucket/README.md
index 80f33e9c..c40183f2 100644
--- a/modules/logbucket/README.md
+++ b/modules/logbucket/README.md
@@ -37,7 +37,10 @@ module "destination" {
 
 | Name | Description | Type | Default | Required |
 |------|-------------|------|---------|:--------:|
+| enable\_analytics | (Optional) Whether or not Log Analytics is enabled. A log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled. | `bool` | `false` | no |
 | grant\_write\_permission\_on\_bkt | (Optional) Indicates whether the module is responsible for granting write permission on the logbucket. This permission will be given by default, but if the user wants, this module can skip this step. This is the case when the sink route logs to a log bucket in the same Cloud project, no new service account will be created and this module will need to bypass granting permissions. | `bool` | `true` | no |
+| linked\_dataset\_description | A user-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters. | `string` | `null` | no |
+| linked\_dataset\_id | The ID of the linked BigQuery dataset. A valid linked dataset ID must contain only letters, digits, and underscores, and be at most 100 characters long. | `string` | `null` | no |
 | location | The location of the log bucket. | `string` | `"global"` | no |
 | log\_sink\_writer\_identity | The service account that logging uses to write log entries to the destination. (This is available as an output coming from the root module). | `string` | n/a | yes |
 | name | The name of the log bucket to be created and used for log entries matching the filter. | `string` | n/a | yes |
@@ -50,6 +53,7 @@ module "destination" {
 |------|-------------|
 | console\_link | The console link to the destination log buckets |
 | destination\_uri | The destination URI for the log bucket. |
+| linked\_dataset\_name | The resource name of the linked BigQuery dataset. |
 | project | The project in which the log bucket was created. |
 | resource\_name | The resource name for the destination log bucket |
diff --git a/modules/logbucket/main.tf b/modules/logbucket/main.tf
index 356e1a1e..8faf87f9 100644
--- a/modules/logbucket/main.tf
+++ b/modules/logbucket/main.tf
@@ -33,10 +33,25 @@ resource "google_project_service" "enable_destination_api" {
 #------------#
 
 resource "google_logging_project_bucket_config" "bucket" {
-  project        = google_project_service.enable_destination_api.project
-  location       = var.location
-  retention_days = var.retention_days
-  bucket_id      = var.name
+  project          = google_project_service.enable_destination_api.project
+  location         = var.location
+  retention_days   = var.retention_days
+  enable_analytics = var.enable_analytics
+  bucket_id        = var.name
+}
+
+#-------------------------#
+# Linked BigQuery dataset #
+#-------------------------#
+
+resource "google_logging_linked_dataset" "linked_dataset" {
+  count = var.linked_dataset_id != null ? 1 : 0
+
+  link_id     = var.linked_dataset_id
+  description = var.linked_dataset_description
+  parent      = "projects/${google_project_service.enable_destination_api.project}"
+  bucket      = google_logging_project_bucket_config.bucket.id
+  location    = var.location
 }
 
 #--------------------------------#
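Once the link exists, the dataset surfaces in BigQuery under the `link_id` in the same project — the integration test below asserts exactly this through `bigqueryDataset.datasetId`. As an illustration only (not part of the module), a consumer could read the linked dataset back with the provider's standard dataset data source; the project ID and dataset ID here are placeholders matching the example values:

```hcl
# Illustrative sketch: assumes the module created a link with
# linked_dataset_id = "log_analytics_dataset" in project "my-project".
data "google_bigquery_dataset" "log_analytics" {
  project    = "my-project"
  dataset_id = "log_analytics_dataset"
}
```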
diff --git a/modules/logbucket/metadata.yaml b/modules/logbucket/metadata.yaml
index f1c965a8..748d1b0e 100644
--- a/modules/logbucket/metadata.yaml
+++ b/modules/logbucket/metadata.yaml
@@ -65,11 +65,26 @@ spec:
     - name: splunk-sink
       location: examples/splunk-sink
   variables:
+    - name: enable_analytics
+      description: (Optional) Whether or not Log Analytics is enabled. A log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled.
+      type: bool
+      default: false
+      required: false
    - name: grant_write_permission_on_bkt
      description: (Optional) Indicates whether the module is responsible for granting write permission on the logbucket. This permission will be given by default, but if the user wants, this module can skip this step. This is the case when the sink route logs to a log bucket in the same Cloud project, no new service account will be created and this module will need to bypass granting permissions.
      type: bool
      default: true
      required: false
+    - name: linked_dataset_description
+      description: A user-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters.
+      type: string
+      default: null
+      required: false
+    - name: linked_dataset_id
+      description: The ID of the linked BigQuery dataset. A valid linked dataset ID must contain only letters, digits, and underscores, and be at most 100 characters long.
+      type: string
+      default: null
+      required: false
    - name: location
      description: The location of the log bucket.
      type: string
@@ -97,6 +112,8 @@ spec:
       description: The console link to the destination log buckets
     - name: destination_uri
       description: The destination URI for the log bucket.
+    - name: linked_dataset_name
+      description: The resource name of the linked BigQuery dataset.
     - name: project
       description: The project in which the log bucket was created.
     - name: resource_name
diff --git a/modules/logbucket/outputs.tf b/modules/logbucket/outputs.tf
index 5b48d2eb..57955985 100644
--- a/modules/logbucket/outputs.tf
+++ b/modules/logbucket/outputs.tf
@@ -33,3 +33,8 @@ output "destination_uri" {
   description = "The destination URI for the log bucket."
   value       = local.destination_uri
 }
+
+output "linked_dataset_name" {
+  description = "The resource name of the linked BigQuery dataset."
+  value       = var.linked_dataset_id != null ? google_logging_linked_dataset.linked_dataset[0].name : ""
+}
diff --git a/modules/logbucket/variables.tf b/modules/logbucket/variables.tf
index 5b3e78a4..5fc8b1c1 100644
--- a/modules/logbucket/variables.tf
+++ b/modules/logbucket/variables.tf
@@ -46,3 +46,21 @@ variable "grant_write_permission_on_bkt" {
   type    = bool
   default = true
 }
+
+variable "enable_analytics" {
+  description = "(Optional) Whether or not Log Analytics is enabled. A log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled."
+  type        = bool
+  default     = false
+}
+
+variable "linked_dataset_id" {
+  description = "The ID of the linked BigQuery dataset. A valid linked dataset ID must contain only letters, digits, and underscores, and be at most 100 characters long."
+  type        = string
+  default     = null
+}
+
+variable "linked_dataset_description" {
+  description = "A user-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters."
+  type        = string
+  default     = null
+}
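The `linked_dataset_id` format (letters, digits, and underscores; at most 100 characters) is documented in the description but not enforced. If enforcement were desired, a `validation` block along these lines would fail the plan early — a sketch, not something this change adds:

```hcl
variable "linked_dataset_id" {
  description = "The ID of the linked BigQuery dataset."
  type        = string
  default     = null

  validation {
    # null means no linked dataset; otherwise enforce the documented format.
    condition     = var.linked_dataset_id == null || can(regex("^[A-Za-z0-9_]{1,100}$", var.linked_dataset_id))
    error_message = "linked_dataset_id may contain only letters, digits, and underscores, up to 100 characters."
  }
}
```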
diff --git a/modules/logbucket/versions.tf b/modules/logbucket/versions.tf
index c43740be..f3fb48fb 100644
--- a/modules/logbucket/versions.tf
+++ b/modules/logbucket/versions.tf
@@ -20,7 +20,7 @@ terraform {
     google = {
       source  = "hashicorp/google"
-      version = ">= 3.53, < 6"
+      version = ">= 4.59, < 6"
     }
   }
 
 }
diff --git a/test/integration/logbucket-project/logbucket_project_test.go b/test/integration/logbucket-project/logbucket_project_test.go
index 9247c717..76bfe3d8 100644
--- a/test/integration/logbucket-project/logbucket_project_test.go
+++ b/test/integration/logbucket-project/logbucket_project_test.go
@@ -39,6 +39,8 @@ func TestLogBucketProjectModule(t *testing.T) {
 	for _, tc := range []struct {
 		projId         string
 		bktName        string
+		linkedDsName   string
+		linkedDsID     string
 		sinkDest       string
 		sinkProjId     string
 		sinkName       string
@@ -47,17 +49,21 @@ func TestLogBucketProjectModule(t *testing.T) {
 		{
 			projId:         bpt.GetStringOutput("log_bucket_project"),
 			bktName:        bpt.GetStringOutput("log_bucket_name"),
+			linkedDsName:   bpt.GetStringOutput("log_bucket_linked_ds_name"),
+			linkedDsID:     "log_analytics_dataset",
 			sinkDest:       bpt.GetStringOutput("log_sink_destination_uri"),
 			sinkProjId:     bpt.GetStringOutput("log_sink_project_id"),
 			sinkName:       bpt.GetStringOutput("log_sink_resource_name"),
 			writerIdentity: bpt.GetStringOutput("log_sink_writer_identity"),
 		},
 		{
-			projId:     bpt.GetStringOutput("log_bkt_same_proj"),
-			bktName:    bpt.GetStringOutput("log_bkt_name_same_proj"),
-			sinkDest:   bpt.GetStringOutput("log_sink_dest_uri_same_proj"),
-			sinkProjId: bpt.GetStringOutput("log_sink_id_same_proj"),
-			sinkName:   bpt.GetStringOutput("log_sink_resource_name_same_proj"),
+			projId:       bpt.GetStringOutput("log_bkt_same_proj"),
+			bktName:      bpt.GetStringOutput("log_bkt_name_same_proj"),
+			linkedDsName: bpt.GetStringOutput("log_bkt_linked_ds_name_same_proj"),
+			linkedDsID:   "log_analytics_dataset_same",
+			sinkDest:     bpt.GetStringOutput("log_sink_dest_uri_same_proj"),
+			sinkProjId:   bpt.GetStringOutput("log_sink_id_same_proj"),
+			sinkName:     bpt.GetStringOutput("log_sink_resource_name_same_proj"),
 			// writerIdentity: As sink and bucket are in same project no service account is needed and writerIdentity is empty
 		},
 	} {
@@ -77,6 +83,13 @@ func TestLogBucketProjectModule(t *testing.T) {
 			assert.Equal(tc.sinkDest, logSinkDetails.Get("destination").String(), "log sink destination should match")
 			assert.Equal("resource.type = gce_instance", logSinkDetails.Get("filter").String(), "log sink filter should match")
 			assert.Equal(tc.writerIdentity, logSinkDetails.Get("writerIdentity").String(), "log sink writerIdentity should match")
+
+			// assert the linked dataset name and BigQuery dataset ID
+			projectNumber := gcloud.Runf(t, "projects describe %s", tc.projId).Get("projectNumber").String()
+			bigqueryDatasetID := fmt.Sprintf("bigquery.googleapis.com/projects/%s/datasets/%s", projectNumber, tc.linkedDsID)
+			linkedDs := gcloud.Runf(t, "logging links describe %s --bucket=%s --location=%s --project=%s", tc.linkedDsID, tc.bktName, "global", tc.projId)
+			assert.Equal(tc.linkedDsName, linkedDs.Get("name").String(), "log bucket linked dataset name should match")
+			assert.Equal(bigqueryDatasetID, linkedDs.Get("bigqueryDataset.datasetId").String(), "log bucket BigQuery dataset ID should match")
 		}
 
 		//*****************************