gpt4 book ai didi

google-cloud-platform - shell 脚本中的 Terraform 变量引用

转载 作者:行者123 更新时间:2023-12-04 05:59:28 32 4
gpt4 key购买 nike

我正在尝试在 Google Cloud Compute 引擎实例上部署 Airflow。

在初始化操作(初始化云 VM 的 Shell 脚本)中有一些特定的事情要更改,以便部署脚本。我想知道我是否可以使用 terraform 来处理这个问题。

这是我的 Terraform 脚本:

# Google Cloud provider configuration (Terraform 0.11 interpolation syntax).
provider "google" {
region = "${var.region}"
project = "${var.project_name}"
# Service-account key file read from disk; path supplied as a variable.
credentials = "${file("${var.credentials_file_path}")}"
# Default zone for zonal resources (e.g. the compute instance below).
zone = "${var.region_zone}"
}

# Cloud SQL instance that backs the Airflow metadata database.
resource "google_sql_database_instance" "master" {
name = "${var.db_instance}"
region = "${var.region}"
settings {
# Machine tier for the Cloud SQL instance.
tier = "db-n1-standard-1"
}
}


# Database user Airflow uses to connect (via the Cloud SQL proxy, see the
# instance startup script which forwards the DB to 127.0.0.1:3306).
resource "google_sql_user" "users" {
name = "${var.db_user}"
# Interpolating the instance name already creates an implicit dependency
# on google_sql_database_instance.master, so an explicit depends_on is
# redundant and has been removed.
instance = "${google_sql_database_instance.master.name}"
# "%" allows connections from any host.
host = "%"
password = "${var.db_password}"
}


# MySQL database inside the master instance that stores Airflow metadata.
resource "google_sql_database" "airflow" {
name = "${var.db_name}"
# Interpolating the instance name already creates an implicit dependency
# on google_sql_database_instance.master, so an explicit depends_on is
# redundant and has been removed.
instance = "${google_sql_database_instance.master.name}"
charset = "utf8"
collation = "utf8_general_ci"
}

# Compute Engine VM that runs Airflow; provisioned by the startup script.
resource "google_compute_instance" "default" {
name = "${var.machine_name}"
machine_type = "${var.machine_type}"
# Tags matched by the firewall rule below to allow HTTP/HTTPS traffic.
tags = ["http-tag", "https-tag", "http-server", "https-server"]

boot_disk {
initialize_params {
image = "projects/debian-cloud/global/images/family/debian-8"
}
}

network_interface {
network = "default"

access_config {
// Ephemeral IP
}
}
//----------------------------My Custom Script-------------------------
// Raw file contents; placeholders in the script must be replaced by hand
// (the accepted answer switches this to a rendered template_file).
metadata_startup_script = "${file("scripts/airflow-instance.sh")}"
//---------------------------------------------------------------------
service_account {
email = "*****@project-name.iam.gserviceaccount.com"
scopes = ["https://www.googleapis.com/auth/cloud-platform"]
}
// Explicit dependency is required here: nothing above interpolates the
// SQL resources, yet the startup script needs them to exist.
depends_on = ["google_sql_database_instance.master",
"google_sql_user.users","google_sql_database.airflow" ]
}

# Firewall rule opening the Airflow webserver (8080) and HTTP (80) ports
# for instances carrying the matching network tags.
resource "google_compute_firewall" "default" {
name = "terraform-airflow-firewall"
network = "default"

allow {
protocol = "tcp"
ports = ["80", "8080"]
}

# Applies only to instances tagged by the compute instance above.
target_tags = ["https-server", "http-server"]

}

Shell脚本如下

需要动态替换的值已用 ** 标出:

#!/bin/bash
# Startup script for a GCE VM running Apache Airflow backed by Cloud SQL.
# Values marked **LIKE_THIS** must be replaced before use (or rendered via
# a Terraform template_file data source).
set -euo pipefail

# Download the Cloud SQL proxy and make it executable.
# mkdir -p keeps the script idempotent if the VM restarts.
sudo mkdir -p /opt/cloud_sql_proxy
cd /opt/cloud_sql_proxy
sudo wget https://dl.google.com/cloudsql/cloud_sql_proxy.linux.amd64 -O cloud_sql_proxy
sudo chmod +x /opt/cloud_sql_proxy/cloud_sql_proxy

# Start the proxy in the background, forwarding the Cloud SQL instance to
# localhost:3306. The connection name can be found in the instance
# properties on the GCP console.
nohup ./cloud_sql_proxy -instances=**PROJECT_NAME**:us-east1:**CLOUD_SQL_INSTANCE_NAME**=tcp:3306 &

# Install build prerequisites for the Python packages below.
sudo apt-get update && sudo apt-get install -y \
  python3-pip \
  python3-dev \
  build-essential \
  libssl-dev \
  libffi-dev \
  libmysqlclient-dev

# Upgrade pip (easy_install is deprecated; use pip to upgrade itself).
sudo pip3 install --upgrade pip

# MySQL driver used for the Airflow metadata database connection.
sudo pip3 install mysqlclient

# Work around a conflict with the preinstalled 'six' package.
sudo pip3 install --ignore-installed six

# Install Airflow with the gcp_api extra (GCP hooks and operators).
# The spec is quoted so the brackets are not treated as a shell glob.
sudo pip3 install "apache-airflow[gcp_api]"
sudo pip3 install oauth2client
sudo pip3 install google-api-python-client

# Create the AIRFLOW_HOME directory tree.
export AIRFLOW_HOME=/airflow
sudo mkdir -p "$AIRFLOW_HOME/dags"
# NOTE(review): 777 is world-writable; consider a dedicated airflow
# user/group with tighter permissions instead.
sudo chmod 777 "$AIRFLOW_HOME"
sudo chmod 777 "$AIRFLOW_HOME/dags"

cd "$AIRFLOW_HOME"

# Run Airflow once so it generates airflow.cfg, then edit that config.
airflow version

# Switch to LocalExecutor, disable example DAGs, and point the metadata
# DB connection at the MySQL database exposed by the Cloud SQL proxy.
sed -i 's/executor = SequentialExecutor/executor = LocalExecutor/g' airflow.cfg
sed -i 's/load_examples = True/load_examples = False/g' airflow.cfg
sed -i 's/sql_alchemy_conn = sqlite:\/\/\/\/airflow\/airflow.db/sql_alchemy_conn = mysql:\/\/**USER:PASSWORD**@127.0.0.1:3306\/**DB_NAME**/g' airflow.cfg

airflow initdb

# Launch the webserver (port 8080) and scheduler in the background.
nohup airflow webserver -p 8080 &
nohup airflow scheduler &

最佳答案

您可以尝试使用 template_file 数据源。它可以把变量值代入模板并渲染出结果。整个配置大致如下:

# Input variable holding the GCP project id (Terraform 0.11 syntax).
variable "project_name" { type = "string" }

# Render the startup script as a template: ${PROJECT_NAME}-style
# placeholders in the script are replaced with the values in `vars`.
# (The `...` lines are illustrative ellipses, not valid HCL.)
data "template_file" "airflow_instance" {
template = "${file("${path.module}/scripts/airflow-instance.sh")}"

vars {
PROJECT_NAME = "${var.project_name}"
...
}
}

resource "google_compute_instance" "default" {

...

# Use the rendered template instead of the raw file() contents.
metadata_startup_script = "${data.template_file.airflow_instance.rendered}"

...
}

关于google-cloud-platform - shell 脚本中的 Terraform 变量引用,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/48990930/

32 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com