Terraform variable referencing in shell script


Problem description

I am trying to deploy Airflow on a Google Cloud Compute Engine instance.

To deploy it, a few specific values in the initialization action (the shell script that initializes the cloud VM) need to be changed per deployment. I would like to know whether I can handle this with Terraform.

Here is my Terraform script:

provider "google" {
  region      = "${var.region}"
  project     = "${var.project_name}"
  credentials = "${file("${var.credentials_file_path}")}"
  zone        = "${var.region_zone}"
}

resource "google_sql_database_instance" "master" {
  name = "${var.db_instance}"
  region = "${var.region}"
  settings {
    tier = "db-n1-standard-1"
  }
}


resource "google_sql_user" "users" {
  name     = "${var.db_user}"
  instance = "${google_sql_database_instance.master.name}"
  host     = "%"
  password = "${var.db_password}"
  depends_on = ["google_sql_database_instance.master"]
}


resource "google_sql_database" "airflow" {
  name      = "${var.db_name}"
  instance  = "${google_sql_database_instance.master.name}"
  charset   = "utf8"
  collation = "utf8_general_ci"
  depends_on = ["google_sql_database_instance.master"]
}

resource "google_compute_instance" "default" {
  name         = "${var.machine_name}"
  machine_type = "${var.machine_type}"
  tags         = ["http-tag", "https-tag", "http-server", "https-server"]

  boot_disk {
    initialize_params {
      image = "projects/debian-cloud/global/images/family/debian-8"
    }
  }

  network_interface {
    network = "default"

    access_config {
      // Ephemeral IP
    }
  }
//----------------------------My Custom Script-------------------------
  metadata_startup_script = "${file("scripts/airflow-instance.sh")}"
//---------------------------------------------------------------------
  service_account {
    email = "*****@project-name.iam.gserviceaccount.com"
    scopes = ["https://www.googleapis.com/auth/cloud-platform"]
  }
   depends_on = ["google_sql_database_instance.master",
"google_sql_user.users","google_sql_database.airflow" ]
}

resource "google_compute_firewall" "default" {
  name    = "terraform-airflow-firewall"
  network = "default"

  allow {
    protocol = "tcp"
    ports    = ["80", "8080"]
  }

  target_tags   = ["https-server", "http-server"]

}

The shell script is as follows.

The values that need to change dynamically are marked as **VALUE**.

#!/bin/bash
# Download the proxy and make it executable. 
sudo mkdir /opt/cloud_sql_proxy
cd /opt/cloud_sql_proxy
sudo wget https://dl.google.com/cloudsql/cloud_sql_proxy.linux.amd64 -O cloud_sql_proxy
sudo chmod +x /opt/cloud_sql_proxy/cloud_sql_proxy

# Start the CloudSQL proxy specifying the database instance to connect to.
# Replace INSTANCE_CONNECTION_NAME with your actual CloudSQL instance connection name. It can be found in the instance properties on the GCP console.
nohup ./cloud_sql_proxy -instances=**PROJECT_NAME**:us-east1:**CLOUD_SQL_INSTANCE_NAME**=tcp:3306 &
# Install prerequisites.
sudo apt-get update && sudo apt-get install -y \
  python3-pip \
  python3-dev \
  build-essential \
  libssl-dev \
  libffi-dev \
  libmysqlclient-dev

# Upgrade pip.
sudo easy_install3 -U pip

# Install some other stuff.
sudo pip3 install mysqlclient

# Install a missed dependency
sudo pip3 install --ignore-installed six

# Install Airflow with the extra package gcp_api containing the hooks and operators for the GCP services.
sudo pip3 install apache-airflow[gcp_api]
sudo pip3 install oauth2client
sudo pip3 install google-api-python-client

# Create AIRFLOW_HOME directory.
export AIRFLOW_HOME=/airflow
sudo mkdir $AIRFLOW_HOME
sudo mkdir $AIRFLOW_HOME/dags
sudo chmod 777 $AIRFLOW_HOME
sudo chmod 777 $AIRFLOW_HOME/dags

cd $AIRFLOW_HOME 

# Run Airflow a first time to create the airflow.cfg configuration file and edit it.
airflow version

#Update airflow.cfg for our config
sed -i 's/executor = SequentialExecutor/executor = LocalExecutor/g' airflow.cfg
sed -i 's/load_examples = True/load_examples = False/g' airflow.cfg
sed -i 's|sql_alchemy_conn = sqlite:////airflow/airflow.db|sql_alchemy_conn = mysql://**USER:PASSWORD**@127.0.0.1:3306/**DB_NAME**|g' airflow.cfg

airflow initdb

nohup airflow webserver -p 8080 &
nohup airflow scheduler &

Recommended answer

You can try the template_file data source. It renders a template file with values filled in. The whole thing could then look something like this:

variable "project_name" { type = "string" }

data "template_file" "airflow_instance" {
  template = "${file("${path.module}/scripts/airflow-instance.sh")}"

  vars {
     PROJECT_NAME = "${var.project_name}"
     ...
  }
}

resource "google_compute_instance" "default" {

  ...

  metadata_startup_script = "${data.template_file.airflow_instance.rendered}"

  ...
}
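
For reference, here is a minimal sketch (not from the original post) of how the marked values in scripts/airflow-instance.sh could be rewritten as template placeholders. The placeholder names PROJECT_NAME, CLOUD_SQL_INSTANCE_NAME, DB_USER, DB_PASSWORD and DB_NAME are assumptions here and must match the keys passed in the vars block above. Note that any literal shell variable reference, such as $AIRFLOW_HOME, has to be escaped as $${AIRFLOW_HOME} so the template renderer does not try to interpolate it.

#!/bin/bash
# Rendered by Terraform's template_file data source; PROJECT_NAME, DB_USER,
# etc. below are template variables filled in from the vars block.
nohup ./cloud_sql_proxy -instances=${PROJECT_NAME}:us-east1:${CLOUD_SQL_INSTANCE_NAME}=tcp:3306 &

# Literal shell variables are escaped so the renderer leaves them alone.
export AIRFLOW_HOME=/airflow
cd $${AIRFLOW_HOME}

# Connection string assembled from template variables (| is used as the sed
# delimiter because the values contain slashes).
sed -i 's|sql_alchemy_conn = sqlite:////airflow/airflow.db|sql_alchemy_conn = mysql://${DB_USER}:${DB_PASSWORD}@127.0.0.1:3306/${DB_NAME}|g' airflow.cfg

The vars block in the data source would then need matching entries, for example DB_USER = "${var.db_user}" and DB_NAME = "${var.db_name}".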
