File tree Expand file tree Collapse file tree
GCP Dataflow to Elasticsearch Expand file tree Collapse file tree Original file line number Diff line number Diff line change 1+ # Ignore any terraform specific folder & hidden files
.terraform*
3+ # Ignore terraform state and vars
terraform.tf*
Original file line number Diff line number Diff line change 1+ terraform 1.0.3
Original file line number Diff line number Diff line change 1+ # GCP Dataflow to Elasticsearch
2+
This example allows you to set up a GCP Dataflow job that forwards GCP Audit logs data to Elasticsearch.
4+
5+ ## Setup
6+
7+ Terraform CLI is required for this example.
8+
Install the `GCP` integration in your Elasticsearch cluster (example tested with 1.0.1).
10+
11+ ## Versions
12+
13+ Tested with Terraform 1.0.3.
14+
15+ Tested with Elastic stack 7.15.0.
16+
17+
# Pub/Sub topic that receives Audit log entries from the logging sink.
resource "google_pubsub_topic" "audit_log_to_es" {
  name = "audit-logs-to-es"
}
4+
# Subscription the Dataflow forwarder job pulls Audit log entries from.
resource "google_pubsub_subscription" "audit_log_to_es" {
  name  = "audit-logs-to-es"
  topic = google_pubsub_topic.audit_log_to_es.name
}
9+
# Dead-letter topic: the Dataflow job publishes entries here when
# delivery to Elasticsearch fails.
resource "google_pubsub_topic" "audit_log_to_es_errors" {
  name = "audit-logs-to-es-errors"
}
13+
# Project-level log sink that routes (sampled) Audit log entries to the
# Pub/Sub topic consumed by the Dataflow forwarder.
resource "google_logging_project_sink" "audit_log_to_es" {
  name        = "audit-logs-to-es"
  description = "Sink Audit logs with sampling for Dataflow Elasticsearch forwarder"

  # Destination must be the full service URI, not just the topic ID.
  destination = "pubsub.googleapis.com/${google_pubsub_topic.audit_log_to_es.id}"

  # Match only Cloud Audit Log entries, down-sampled by insertId so that
  # audit_log_sampling = 1 forwards everything and 0 forwards nothing.
  filter = "protoPayload.@type=\"type.googleapis.com/google.cloud.audit.AuditLog\" and sample(insertId, ${var.audit_log_sampling})"

  # Use a dedicated service account for the sink writer; that identity
  # must be granted publish rights on the topic (not shown in this file).
  unique_writer_identity = true
}
23+
# Dataflow Flex Template job running Google's PubSub_to_Elasticsearch
# streaming template: reads from the audit-log subscription and indexes
# documents into the Elasticsearch "audit" dataset.
resource "google_dataflow_flex_template_job" "forward_audit_logs_to_es" {
  # Flex template jobs are only available through the google-beta provider.
  provider = google-beta
  name     = "forward-audit-logs-to-es"

  # "drain" (default) finishes in-flight messages on destroy; "cancel" stops immediately.
  on_delete = var.audit_log_on_job_delete

  container_spec_gcs_path = "gs://dataflow-templates/${var.dataflow_template_version}/flex/PubSub_to_Elasticsearch"

  parameters = {
    # Routes documents to the GCP integration's "audit" data stream.
    dataset = "audit"

    connectionUrl = var.connection_url
    apiKey        = var.api_key

    inputSubscription = google_pubsub_subscription.audit_log_to_es.id
    errorOutputTopic  = google_pubsub_topic.audit_log_to_es_errors.id

    batchSize = var.audit_log_forwarder_batch_size
  }
}
# Default Google provider: used by GA resources (Pub/Sub, logging sink).
provider "google" {
  project = var.project_id
  region  = var.region
}

# Beta provider: required by google_dataflow_flex_template_job.
provider "google-beta" {
  project = var.project_id
  region  = var.region
}
variable "project_id" {
  type        = string
  description = "The GCP Project ID."
}
5+
variable "region" {
  type        = string
  description = "GCP region to create resources into."
}
10+
variable "connection_url" {
  type        = string
  description = "The Elasticsearch Cloud ID or connection URL."
}
15+
variable "api_key" {
  type        = string
  description = "The Elasticsearch API key."
  # Mark as sensitive so the key is redacted from plan/apply output.
  sensitive   = true
}
20+
variable "dataflow_template_version" {
  type        = string
  description = "GCP Dataflow Flex template version. See https://cloud.google.com/dataflow/docs/guides/templates/provided-streaming."
  # Interpolated into the gs://dataflow-templates/<version>/... path; a
  # stray space here would point at a nonexistent template.
  default = "latest"
}
26+
# Audit logs
variable "audit_log_sampling" {
  type        = number
  description = "Sampling fraction (0 to 1) for Audit logs"
  default     = 1

  # Values outside [0, 1] make the sink's sample() filter invalid.
  validation {
    condition     = var.audit_log_sampling >= 0 && var.audit_log_sampling <= 1
    error_message = "audit_log_sampling must be between 0 and 1 inclusive."
  }
}
33+
variable "audit_log_forwarder_batch_size" {
  type        = number
  description = "Batch size for Audit logs forwarder job"
  default     = 1000
}
39+
variable "audit_log_on_job_delete" {
  type        = string
  description = "Action to perform when Audit logs forwarder job is deleted (drain or cancel)"
  default     = "drain"

  # The Dataflow job resource only accepts these two on_delete actions.
  validation {
    condition     = contains(["drain", "cancel"], var.audit_log_on_job_delete)
    error_message = "audit_log_on_job_delete must be either \"drain\" or \"cancel\"."
  }
}
You can’t perform that action at this time.
0 commit comments