我有一个云调度程序,它应该使用 Flex 模板部署数据流作业。我进行了 terraform 更改来创建云调度程序,从而创建云调度程序作业。但是,当我尝试运行该作业时,它给出了错误
{"@type":"type.googleapis.com/google.cloud.scheduler.logging.AttemptFinished", "jobName":"projects/engineering-199721/locations/us-central1/jobs/dummy-batch-daily-job", "status":"INVALID_ARGUMENT", "targetType":"HTTP", "url":"https://dataflow.googleapis.com/v1b3/projects/engineering-199721/locations/us-central1/templates:launch?gcsPath=gs://inf_bigdata_dev15_temp_location/dataflow/templates/sample_batch_job_D3.1.7-47-g3c619"}
下面是我的tf代码
resource "google_cloud_scheduler_job" "dummy-batch_daily"
{
name = "dummy-batch-daily-job"
schedule = "0 2 * * *" # 2 am daily
region = var.GCP_REGION
time_zone = "America/Los_Angeles"
http_target {
http_method = "POST"
uri = "https://dataflow.googleapis.com/v1b3/projects/${var.GCP_PROJECT}/locations/${var.GCP_REGION}/templates:launch?gcsPath=gs://xyz/dataflow/templates/sample_batch_job_D3.1.7-47-g3c619"
oauth_token {
service_account_email = "[email protected]"
}
body = base64encode(<<-EOT
{
"jobName": "dummy-batch",
"parameters": {
"project": "${var.GCP_PROJECT}",
"deploymentName": "${var.INF_PURE_DEPLOYMENT}",
"bigQueryProject": "${var.GCP_PROJECT}",
"bigQueryTable": "${var.INF_BIGDATA_TELEMETRY_REPROCESS_BIGQUERY_TABLE}",
"bigQueryDataset": "${var.INF_BIGDATA_BIGQUERY_DATASET}",
"bigDataDisplayVersion": 1
},
"environment": {
"subnetwork": "regions/${var.GCP_REGION}/subnetworks/${var.INF_BIGDATA_SUBNET}",
"tempLocation": "${var.INF_BIGDATA_PIPELINE_TEMP_LOCATION}/dataflow",
"numWorkers": ${var.INF_BIGDATA_START_NUM_WORKERS}
}
}
EOT
)
headers = {
"Content-Type" = "application/json"
}
}
}
尝试过https://medium.com/@zhongchen/schedule-your-dataflow-batch-jobs-with-cloud-scheduler-8390e0e958e, 也尝试删除“参数”块,但仍然是同样的问题。
如有任何帮助,我们将不胜感激。
含义:
resource "google_cloud_scheduler_job" "dummy-batch_daily" {
name = "dummy-batch-daily-job"
schedule = "0 2 * * *" # 2 am daily
region = ${var.GCP_REGION}
http_target {
http_method = "POST"
uri = "https://dataflow.googleapis.com/v1b3/projects/${var.GCP_PROJECT}/locations/${var.GCP_REGION}/flexTemplates:launch"
oauth_token {
service_account_email = "[email protected]"
}
body = base64encode(<<-EOT
{
"launchParameter": {
"jobName": "dummy-batch",
"containerSpecGcsPath": "gs://xyz/dataflow/templates/sample_batch_job_D3.1.7-47-g3c619",
"parameters": {
"project": "${var.GCP_PROJECT}",
"deploymentName": "${var.INF_PURE_DEPLOYMENT}",
"bigQueryProject": "${var.GCP_PROJECT}",
"bigQueryTable": "${var.INF_BIGDATA_TELEMETRY_REPROCESS_BIGQUERY_TABLE}",
"bigQueryDataset": "${var.INF_BIGDATA_BIGQUERY_DATASET}",
"bigDataDisplayVersion": 1
},
"environment": {
"subnetwork": "regions/${var.GCP_REGION}/subnetworks/${var.INF_BIGDATA_SUBNET}",
"tempLocation": "${var.INF_BIGDATA_PIPELINE_TEMP_LOCATION}/dataflow",
"numWorkers": ${var.INF_BIGDATA_START_NUM_WORKERS}
}
}
}
EOT
)
}
}