Google Cloud v8.23.0 published on Monday, Mar 24, 2025 by Pulumi

gcp.dataproc.WorkflowTemplate

A Workflow Template is a reusable workflow configuration. It defines a graph of jobs with information on where to run those jobs.

Example Usage

TypeScript:

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const template = new gcp.dataproc.WorkflowTemplate("template", {
    name: "template-example",
    location: "us-central1",
    placement: {
        managedCluster: {
            clusterName: "my-cluster",
            config: {
                gceClusterConfig: {
                    zone: "us-central1-a",
                    tags: [
                        "foo",
                        "bar",
                    ],
                },
                masterConfig: {
                    numInstances: 1,
                    machineType: "n1-standard-1",
                    diskConfig: {
                        bootDiskType: "pd-ssd",
                        bootDiskSizeGb: 15,
                    },
                },
                workerConfig: {
                    numInstances: 3,
                    machineType: "n1-standard-2",
                    diskConfig: {
                        bootDiskSizeGb: 10,
                        numLocalSsds: 2,
                    },
                },
                secondaryWorkerConfig: {
                    numInstances: 2,
                },
                softwareConfig: {
                    imageVersion: "2.0.35-debian10",
                },
            },
        },
    },
    jobs: [
        {
            stepId: "someJob",
            sparkJob: {
                mainClass: "SomeClass",
            },
        },
        {
            stepId: "otherJob",
            prerequisiteStepIds: ["someJob"],
            prestoJob: {
                queryFileUri: "someuri",
            },
        },
    ],
});

Python:

import pulumi
import pulumi_gcp as gcp

template = gcp.dataproc.WorkflowTemplate("template",
    name="template-example",
    location="us-central1",
    placement={
        "managed_cluster": {
            "cluster_name": "my-cluster",
            "config": {
                "gce_cluster_config": {
                    "zone": "us-central1-a",
                    "tags": [
                        "foo",
                        "bar",
                    ],
                },
                "master_config": {
                    "num_instances": 1,
                    "machine_type": "n1-standard-1",
                    "disk_config": {
                        "boot_disk_type": "pd-ssd",
                        "boot_disk_size_gb": 15,
                    },
                },
                "worker_config": {
                    "num_instances": 3,
                    "machine_type": "n1-standard-2",
                    "disk_config": {
                        "boot_disk_size_gb": 10,
                        "num_local_ssds": 2,
                    },
                },
                "secondary_worker_config": {
                    "num_instances": 2,
                },
                "software_config": {
                    "image_version": "2.0.35-debian10",
                },
            },
        },
    },
    jobs=[
        {
            "step_id": "someJob",
            "spark_job": {
                "main_class": "SomeClass",
            },
        },
        {
            "step_id": "otherJob",
            "prerequisite_step_ids": ["someJob"],
            "presto_job": {
                "query_file_uri": "someuri",
            },
        },
    ])

Go:

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataproc.NewWorkflowTemplate(ctx, "template", &dataproc.WorkflowTemplateArgs{
			Name:     pulumi.String("template-example"),
			Location: pulumi.String("us-central1"),
			Placement: &dataproc.WorkflowTemplatePlacementArgs{
				ManagedCluster: &dataproc.WorkflowTemplatePlacementManagedClusterArgs{
					ClusterName: pulumi.String("my-cluster"),
					Config: &dataproc.WorkflowTemplatePlacementManagedClusterConfigArgs{
						GceClusterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs{
							Zone: pulumi.String("us-central1-a"),
							Tags: pulumi.StringArray{
								pulumi.String("foo"),
								pulumi.String("bar"),
							},
						},
						MasterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs{
							NumInstances: pulumi.Int(1),
							MachineType:  pulumi.String("n1-standard-1"),
							DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs{
								BootDiskType:   pulumi.String("pd-ssd"),
								BootDiskSizeGb: pulumi.Int(15),
							},
						},
						WorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs{
							NumInstances: pulumi.Int(3),
							MachineType:  pulumi.String("n1-standard-2"),
							DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs{
								BootDiskSizeGb: pulumi.Int(10),
								NumLocalSsds:   pulumi.Int(2),
							},
						},
						SecondaryWorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs{
							NumInstances: pulumi.Int(2),
						},
						SoftwareConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs{
							ImageVersion: pulumi.String("2.0.35-debian10"),
						},
					},
				},
			},
			Jobs: dataproc.WorkflowTemplateJobArray{
				&dataproc.WorkflowTemplateJobArgs{
					StepId: pulumi.String("someJob"),
					SparkJob: &dataproc.WorkflowTemplateJobSparkJobArgs{
						MainClass: pulumi.String("SomeClass"),
					},
				},
				&dataproc.WorkflowTemplateJobArgs{
					StepId: pulumi.String("otherJob"),
					PrerequisiteStepIds: pulumi.StringArray{
						pulumi.String("someJob"),
					},
					PrestoJob: &dataproc.WorkflowTemplateJobPrestoJobArgs{
						QueryFileUri: pulumi.String("someuri"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#:

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var template = new Gcp.Dataproc.WorkflowTemplate("template", new()
    {
        Name = "template-example",
        Location = "us-central1",
        Placement = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementArgs
        {
            ManagedCluster = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterArgs
            {
                ClusterName = "my-cluster",
                Config = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigArgs
                {
                    GceClusterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs
                    {
                        Zone = "us-central1-a",
                        Tags = new[]
                        {
                            "foo",
                            "bar",
                        },
                    },
                    MasterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs
                    {
                        NumInstances = 1,
                        MachineType = "n1-standard-1",
                        DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs
                        {
                            BootDiskType = "pd-ssd",
                            BootDiskSizeGb = 15,
                        },
                    },
                    WorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs
                    {
                        NumInstances = 3,
                        MachineType = "n1-standard-2",
                        DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs
                        {
                            BootDiskSizeGb = 10,
                            NumLocalSsds = 2,
                        },
                    },
                    SecondaryWorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs
                    {
                        NumInstances = 2,
                    },
                    SoftwareConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs
                    {
                        ImageVersion = "2.0.35-debian10",
                    },
                },
            },
        },
        Jobs = new[]
        {
            new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
            {
                StepId = "someJob",
                SparkJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkJobArgs
                {
                    MainClass = "SomeClass",
                },
            },
            new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
            {
                StepId = "otherJob",
                PrerequisiteStepIds = new[]
                {
                    "someJob",
                },
                PrestoJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPrestoJobArgs
                {
                    QueryFileUri = "someuri",
                },
            },
        },
    });

});

Java:

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.WorkflowTemplate;
import com.pulumi.gcp.dataproc.WorkflowTemplateArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkJobArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobPrestoJobArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var template = new WorkflowTemplate("template", WorkflowTemplateArgs.builder()
            .name("template-example")
            .location("us-central1")
            .placement(WorkflowTemplatePlacementArgs.builder()
                .managedCluster(WorkflowTemplatePlacementManagedClusterArgs.builder()
                    .clusterName("my-cluster")
                    .config(WorkflowTemplatePlacementManagedClusterConfigArgs.builder()
                        .gceClusterConfig(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs.builder()
                            .zone("us-central1-a")
                            .tags(                            
                                "foo",
                                "bar")
                            .build())
                        .masterConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs.builder()
                            .numInstances(1)
                            .machineType("n1-standard-1")
                            .diskConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs.builder()
                                .bootDiskType("pd-ssd")
                                .bootDiskSizeGb(15)
                                .build())
                            .build())
                        .workerConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs.builder()
                            .numInstances(3)
                            .machineType("n1-standard-2")
                            .diskConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs.builder()
                                .bootDiskSizeGb(10)
                                .numLocalSsds(2)
                                .build())
                            .build())
                        .secondaryWorkerConfig(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs.builder()
                            .numInstances(2)
                            .build())
                        .softwareConfig(WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs.builder()
                            .imageVersion("2.0.35-debian10")
                            .build())
                        .build())
                    .build())
                .build())
            .jobs(            
                WorkflowTemplateJobArgs.builder()
                    .stepId("someJob")
                    .sparkJob(WorkflowTemplateJobSparkJobArgs.builder()
                        .mainClass("SomeClass")
                        .build())
                    .build(),
                WorkflowTemplateJobArgs.builder()
                    .stepId("otherJob")
                    .prerequisiteStepIds("someJob")
                    .prestoJob(WorkflowTemplateJobPrestoJobArgs.builder()
                        .queryFileUri("someuri")
                        .build())
                    .build())
            .build());

    }
}

YAML:

resources:
  template:
    type: gcp:dataproc:WorkflowTemplate
    properties:
      name: template-example
      location: us-central1
      placement:
        managedCluster:
          clusterName: my-cluster
          config:
            gceClusterConfig:
              zone: us-central1-a
              tags:
                - foo
                - bar
            masterConfig:
              numInstances: 1
              machineType: n1-standard-1
              diskConfig:
                bootDiskType: pd-ssd
                bootDiskSizeGb: 15
            workerConfig:
              numInstances: 3
              machineType: n1-standard-2
              diskConfig:
                bootDiskSizeGb: 10
                numLocalSsds: 2
            secondaryWorkerConfig:
              numInstances: 2
            softwareConfig:
              imageVersion: 2.0.35-debian10
      jobs:
        - stepId: someJob
          sparkJob:
            mainClass: SomeClass
        - stepId: otherJob
          prerequisiteStepIds:
            - someJob
          prestoJob:
            queryFileUri: someuri
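The example above provisions a managed cluster for each workflow run. Placement can instead select an already-running cluster by label with clusterSelector. The following is a minimal TypeScript sketch; the template name and the env: staging label are illustrative and must match labels on an existing Dataproc cluster in the same region:

import * as gcp from "@pulumi/gcp";

// Run the workflow's jobs on an existing cluster that carries the matching
// label, instead of creating and deleting a managed cluster per run.
const selectorTemplate = new gcp.dataproc.WorkflowTemplate("selector-template", {
    name: "selector-example",      // illustrative
    location: "us-central1",
    placement: {
        clusterSelector: {
            clusterLabels: {
                env: "staging",    // illustrative; must match the target cluster's labels
            },
        },
    },
    jobs: [{
        stepId: "someJob",
        sparkJob: {
            mainClass: "SomeClass",
        },
    }],
});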

Create WorkflowTemplate Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see the Pulumi Resources documentation.

Constructor syntax

TypeScript:

new WorkflowTemplate(name: string, args: WorkflowTemplateArgs, opts?: CustomResourceOptions);

Python:

@overload
def WorkflowTemplate(resource_name: str,
                     args: WorkflowTemplateArgs,
                     opts: Optional[ResourceOptions] = None)

@overload
def WorkflowTemplate(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     jobs: Optional[Sequence[WorkflowTemplateJobArgs]] = None,
                     location: Optional[str] = None,
                     placement: Optional[WorkflowTemplatePlacementArgs] = None,
                     dag_timeout: Optional[str] = None,
                     encryption_config: Optional[WorkflowTemplateEncryptionConfigArgs] = None,
                     labels: Optional[Mapping[str, str]] = None,
                     name: Optional[str] = None,
                     parameters: Optional[Sequence[WorkflowTemplateParameterArgs]] = None,
                     project: Optional[str] = None,
                     version: Optional[int] = None)

Go:

func NewWorkflowTemplate(ctx *Context, name string, args WorkflowTemplateArgs, opts ...ResourceOption) (*WorkflowTemplate, error)

C#:

public WorkflowTemplate(string name, WorkflowTemplateArgs args, CustomResourceOptions? opts = null)

Java:

public WorkflowTemplate(String name, WorkflowTemplateArgs args)
public WorkflowTemplate(String name, WorkflowTemplateArgs args, CustomResourceOptions options)

YAML:

type: gcp:dataproc:WorkflowTemplate
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

TypeScript:

name string (required)
The unique name of the resource.
args WorkflowTemplateArgs (required)
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.

Python:

resource_name str (required)
The unique name of the resource.
args WorkflowTemplateArgs (required)
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.

Go:

ctx Context
Context object for the current deployment.
name string (required)
The unique name of the resource.
args WorkflowTemplateArgs (required)
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.

C#:

name string (required)
The unique name of the resource.
args WorkflowTemplateArgs (required)
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.

Java:

name String (required)
The unique name of the resource.
args WorkflowTemplateArgs (required)
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.
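
As a minimal TypeScript sketch of passing these options, the resource options bag goes after the args object; the bucket dependency and the option values below are illustrative, not required by the resource:

import * as gcp from "@pulumi/gcp";

// Illustrative dependency: make sure the bucket exists before the template.
const staging = new gcp.storage.Bucket("staging", { location: "US" });

const template = new gcp.dataproc.WorkflowTemplate("template",
    {
        location: "us-central1",
        placement: {
            clusterSelector: {
                clusterLabels: { env: "staging" },    // illustrative label
            },
        },
        jobs: [{ stepId: "someJob", sparkJob: { mainClass: "SomeClass" } }],
    },
    {
        // opts (CustomResourceOptions) control the resource's behavior, not its properties.
        dependsOn: [staging],    // create the bucket before the template
        protect: false,          // set to true to block accidental deletion
    });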

Constructor example

The following reference example uses placeholder values for all input properties.

C#:

var workflowTemplateResource = new Gcp.Dataproc.WorkflowTemplate("workflowTemplateResource", new()
{
    Jobs = new[]
    {
        new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
        {
            StepId = "string",
            HadoopJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobHadoopJobArgs
            {
                ArchiveUris = new[]
                {
                    "string",
                },
                Args = new[]
                {
                    "string",
                },
                FileUris = new[]
                {
                    "string",
                },
                JarFileUris = new[]
                {
                    "string",
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateJobHadoopJobLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "string", "string" },
                    },
                },
                MainClass = "string",
                MainJarFileUri = "string",
                Properties = 
                {
                    { "string", "string" },
                },
            },
            HiveJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobHiveJobArgs
            {
                ContinueOnFailure = false,
                JarFileUris = new[]
                {
                    "string",
                },
                Properties = 
                {
                    { "string", "string" },
                },
                QueryFileUri = "string",
                QueryList = new Gcp.Dataproc.Inputs.WorkflowTemplateJobHiveJobQueryListArgs
                {
                    Queries = new[]
                    {
                        "string",
                    },
                },
                ScriptVariables = 
                {
                    { "string", "string" },
                },
            },
            Labels = 
            {
                { "string", "string" },
            },
            PigJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPigJobArgs
            {
                ContinueOnFailure = false,
                JarFileUris = new[]
                {
                    "string",
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPigJobLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "string", "string" },
                    },
                },
                Properties = 
                {
                    { "string", "string" },
                },
                QueryFileUri = "string",
                QueryList = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPigJobQueryListArgs
                {
                    Queries = new[]
                    {
                        "string",
                    },
                },
                ScriptVariables = 
                {
                    { "string", "string" },
                },
            },
            PrerequisiteStepIds = new[]
            {
                "string",
            },
            PrestoJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPrestoJobArgs
            {
                ClientTags = new[]
                {
                    "string",
                },
                ContinueOnFailure = false,
                LoggingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPrestoJobLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "string", "string" },
                    },
                },
                OutputFormat = "string",
                Properties = 
                {
                    { "string", "string" },
                },
                QueryFileUri = "string",
                QueryList = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPrestoJobQueryListArgs
                {
                    Queries = new[]
                    {
                        "string",
                    },
                },
            },
            PysparkJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPysparkJobArgs
            {
                MainPythonFileUri = "string",
                ArchiveUris = new[]
                {
                    "string",
                },
                Args = new[]
                {
                    "string",
                },
                FileUris = new[]
                {
                    "string",
                },
                JarFileUris = new[]
                {
                    "string",
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPysparkJobLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "string", "string" },
                    },
                },
                Properties = 
                {
                    { "string", "string" },
                },
                PythonFileUris = new[]
                {
                    "string",
                },
            },
            Scheduling = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSchedulingArgs
            {
                MaxFailuresPerHour = 0,
                MaxFailuresTotal = 0,
            },
            SparkJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkJobArgs
            {
                ArchiveUris = new[]
                {
                    "string",
                },
                Args = new[]
                {
                    "string",
                },
                FileUris = new[]
                {
                    "string",
                },
                JarFileUris = new[]
                {
                    "string",
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkJobLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "string", "string" },
                    },
                },
                MainClass = "string",
                MainJarFileUri = "string",
                Properties = 
                {
                    { "string", "string" },
                },
            },
            SparkRJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkRJobArgs
            {
                MainRFileUri = "string",
                ArchiveUris = new[]
                {
                    "string",
                },
                Args = new[]
                {
                    "string",
                },
                FileUris = new[]
                {
                    "string",
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkRJobLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "string", "string" },
                    },
                },
                Properties = 
                {
                    { "string", "string" },
                },
            },
            SparkSqlJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkSqlJobArgs
            {
                JarFileUris = new[]
                {
                    "string",
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkSqlJobLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "string", "string" },
                    },
                },
                Properties = 
                {
                    { "string", "string" },
                },
                QueryFileUri = "string",
                QueryList = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkSqlJobQueryListArgs
                {
                    Queries = new[]
                    {
                        "string",
                    },
                },
                ScriptVariables = 
                {
                    { "string", "string" },
                },
            },
        },
    },
    Location = "string",
    Placement = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementArgs
    {
        ClusterSelector = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementClusterSelectorArgs
        {
            ClusterLabels = 
            {
                { "string", "string" },
            },
            Zone = "string",
        },
        ManagedCluster = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterArgs
        {
            ClusterName = "string",
            Config = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigArgs
            {
                AutoscalingConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigArgs
                {
                    Policy = "string",
                },
                EncryptionConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigEncryptionConfigArgs
                {
                    GcePdKmsKeyName = "string",
                },
                EndpointConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigEndpointConfigArgs
                {
                    EnableHttpPortAccess = false,
                    HttpPorts = 
                    {
                        { "string", "string" },
                    },
                },
                GceClusterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs
                {
                    InternalIpOnly = false,
                    Metadata = 
                    {
                        { "string", "string" },
                    },
                    Network = "string",
                    NodeGroupAffinity = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityArgs
                    {
                        NodeGroup = "string",
                    },
                    PrivateIpv6GoogleAccess = "string",
                    ReservationAffinity = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityArgs
                    {
                        ConsumeReservationType = "string",
                        Key = "string",
                        Values = new[]
                        {
                            "string",
                        },
                    },
                    ServiceAccount = "string",
                    ServiceAccountScopes = new[]
                    {
                        "string",
                    },
                    ShieldedInstanceConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigArgs
                    {
                        EnableIntegrityMonitoring = false,
                        EnableSecureBoot = false,
                        EnableVtpm = false,
                    },
                    Subnetwork = "string",
                    Tags = new[]
                    {
                        "string",
                    },
                    Zone = "string",
                },
                GkeClusterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigArgs
                {
                    NamespacedGkeDeploymentTarget = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTargetArgs
                    {
                        ClusterNamespace = "string",
                        TargetGkeCluster = "string",
                    },
                },
                InitializationActions = new[]
                {
                    new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigInitializationActionArgs
                    {
                        ExecutableFile = "string",
                        ExecutionTimeout = "string",
                    },
                },
                LifecycleConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigLifecycleConfigArgs
                {
                    AutoDeleteTime = "string",
                    AutoDeleteTtl = "string",
                    IdleDeleteTtl = "string",
                    IdleStartTime = "string",
                },
                MasterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs
                {
                    Accelerators = new[]
                    {
                        new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorArgs
                        {
                            AcceleratorCount = 0,
                            AcceleratorType = "string",
                        },
                    },
                    DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs
                    {
                        BootDiskSizeGb = 0,
                        BootDiskType = "string",
                        NumLocalSsds = 0,
                    },
                    Image = "string",
                    InstanceNames = new[]
                    {
                        "string",
                    },
                    IsPreemptible = false,
                    MachineType = "string",
                    ManagedGroupConfigs = new[]
                    {
                        new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigArgs
                        {
                            InstanceGroupManagerName = "string",
                            InstanceTemplateName = "string",
                        },
                    },
                    MinCpuPlatform = "string",
                    NumInstances = 0,
                    Preemptibility = "string",
                },
                MetastoreConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMetastoreConfigArgs
                {
                    DataprocMetastoreService = "string",
                },
                SecondaryWorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs
                {
                    Accelerators = new[]
                    {
                        new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorArgs
                        {
                            AcceleratorCount = 0,
                            AcceleratorType = "string",
                        },
                    },
                    DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigArgs
                    {
                        BootDiskSizeGb = 0,
                        BootDiskType = "string",
                        NumLocalSsds = 0,
                    },
                    Image = "string",
                    InstanceNames = new[]
                    {
                        "string",
                    },
                    IsPreemptible = false,
                    MachineType = "string",
                    ManagedGroupConfigs = new[]
                    {
                        new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigArgs
                        {
                            InstanceGroupManagerName = "string",
                            InstanceTemplateName = "string",
                        },
                    },
                    MinCpuPlatform = "string",
                    NumInstances = 0,
                    Preemptibility = "string",
                },
                SecurityConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecurityConfigArgs
                {
                    KerberosConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigArgs
                    {
                        CrossRealmTrustAdminServer = "string",
                        CrossRealmTrustKdc = "string",
                        CrossRealmTrustRealm = "string",
                        CrossRealmTrustSharedPassword = "string",
                        EnableKerberos = false,
                        KdcDbKey = "string",
                        KeyPassword = "string",
                        Keystore = "string",
                        KeystorePassword = "string",
                        KmsKey = "string",
                        Realm = "string",
                        RootPrincipalPassword = "string",
                        TgtLifetimeHours = 0,
                        Truststore = "string",
                        TruststorePassword = "string",
                    },
                },
                SoftwareConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs
                {
                    ImageVersion = "string",
                    OptionalComponents = new[]
                    {
                        "string",
                    },
                    Properties = 
                    {
                        { "string", "string" },
                    },
                },
                StagingBucket = "string",
                TempBucket = "string",
                WorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs
                {
                    Accelerators = new[]
                    {
                        new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorArgs
                        {
                            AcceleratorCount = 0,
                            AcceleratorType = "string",
                        },
                    },
                    DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs
                    {
                        BootDiskSizeGb = 0,
                        BootDiskType = "string",
                        NumLocalSsds = 0,
                    },
                    Image = "string",
                    InstanceNames = new[]
                    {
                        "string",
                    },
                    IsPreemptible = false,
                    MachineType = "string",
                    ManagedGroupConfigs = new[]
                    {
                        new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigArgs
                        {
                            InstanceGroupManagerName = "string",
                            InstanceTemplateName = "string",
                        },
                    },
                    MinCpuPlatform = "string",
                    NumInstances = 0,
                    Preemptibility = "string",
                },
            },
            Labels = 
            {
                { "string", "string" },
            },
        },
    },
    DagTimeout = "string",
    EncryptionConfig = new Gcp.Dataproc.Inputs.WorkflowTemplateEncryptionConfigArgs
    {
        KmsKey = "string",
    },
    Labels = 
    {
        { "string", "string" },
    },
    Name = "string",
    Parameters = new[]
    {
        new Gcp.Dataproc.Inputs.WorkflowTemplateParameterArgs
        {
            Fields = new[]
            {
                "string",
            },
            Name = "string",
            Description = "string",
            Validation = new Gcp.Dataproc.Inputs.WorkflowTemplateParameterValidationArgs
            {
                Regex = new Gcp.Dataproc.Inputs.WorkflowTemplateParameterValidationRegexArgs
                {
                    Regexes = new[]
                    {
                        "string",
                    },
                },
                Values = new Gcp.Dataproc.Inputs.WorkflowTemplateParameterValidationValuesArgs
                {
                    Values = new[]
                    {
                        "string",
                    },
                },
            },
        },
    },
    Project = "string",
});

Go:

example, err := dataproc.NewWorkflowTemplate(ctx, "workflowTemplateResource", &dataproc.WorkflowTemplateArgs{
	Jobs: dataproc.WorkflowTemplateJobArray{
		&dataproc.WorkflowTemplateJobArgs{
			StepId: pulumi.String("string"),
			HadoopJob: &dataproc.WorkflowTemplateJobHadoopJobArgs{
				ArchiveUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				Args: pulumi.StringArray{
					pulumi.String("string"),
				},
				FileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				JarFileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				LoggingConfig: &dataproc.WorkflowTemplateJobHadoopJobLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				MainClass:      pulumi.String("string"),
				MainJarFileUri: pulumi.String("string"),
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
			HiveJob: &dataproc.WorkflowTemplateJobHiveJobArgs{
				ContinueOnFailure: pulumi.Bool(false),
				JarFileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				QueryFileUri: pulumi.String("string"),
				QueryList: &dataproc.WorkflowTemplateJobHiveJobQueryListArgs{
					Queries: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
				ScriptVariables: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
			Labels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			PigJob: &dataproc.WorkflowTemplateJobPigJobArgs{
				ContinueOnFailure: pulumi.Bool(false),
				JarFileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				LoggingConfig: &dataproc.WorkflowTemplateJobPigJobLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				QueryFileUri: pulumi.String("string"),
				QueryList: &dataproc.WorkflowTemplateJobPigJobQueryListArgs{
					Queries: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
				ScriptVariables: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
			PrerequisiteStepIds: pulumi.StringArray{
				pulumi.String("string"),
			},
			PrestoJob: &dataproc.WorkflowTemplateJobPrestoJobArgs{
				ClientTags: pulumi.StringArray{
					pulumi.String("string"),
				},
				ContinueOnFailure: pulumi.Bool(false),
				LoggingConfig: &dataproc.WorkflowTemplateJobPrestoJobLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				OutputFormat: pulumi.String("string"),
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				QueryFileUri: pulumi.String("string"),
				QueryList: &dataproc.WorkflowTemplateJobPrestoJobQueryListArgs{
					Queries: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
			},
			PysparkJob: &dataproc.WorkflowTemplateJobPysparkJobArgs{
				MainPythonFileUri: pulumi.String("string"),
				ArchiveUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				Args: pulumi.StringArray{
					pulumi.String("string"),
				},
				FileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				JarFileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				LoggingConfig: &dataproc.WorkflowTemplateJobPysparkJobLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				PythonFileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
			},
			Scheduling: &dataproc.WorkflowTemplateJobSchedulingArgs{
				MaxFailuresPerHour: pulumi.Int(0),
				MaxFailuresTotal:   pulumi.Int(0),
			},
			SparkJob: &dataproc.WorkflowTemplateJobSparkJobArgs{
				ArchiveUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				Args: pulumi.StringArray{
					pulumi.String("string"),
				},
				FileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				JarFileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				LoggingConfig: &dataproc.WorkflowTemplateJobSparkJobLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				MainClass:      pulumi.String("string"),
				MainJarFileUri: pulumi.String("string"),
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
			SparkRJob: &dataproc.WorkflowTemplateJobSparkRJobArgs{
				MainRFileUri: pulumi.String("string"),
				ArchiveUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				Args: pulumi.StringArray{
					pulumi.String("string"),
				},
				FileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				LoggingConfig: &dataproc.WorkflowTemplateJobSparkRJobLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
			SparkSqlJob: &dataproc.WorkflowTemplateJobSparkSqlJobArgs{
				JarFileUris: pulumi.StringArray{
					pulumi.String("string"),
				},
				LoggingConfig: &dataproc.WorkflowTemplateJobSparkSqlJobLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				QueryFileUri: pulumi.String("string"),
				QueryList: &dataproc.WorkflowTemplateJobSparkSqlJobQueryListArgs{
					Queries: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
				ScriptVariables: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
		},
	},
	Location: pulumi.String("string"),
	Placement: &dataproc.WorkflowTemplatePlacementArgs{
		ClusterSelector: &dataproc.WorkflowTemplatePlacementClusterSelectorArgs{
			ClusterLabels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			Zone: pulumi.String("string"),
		},
		ManagedCluster: &dataproc.WorkflowTemplatePlacementManagedClusterArgs{
			ClusterName: pulumi.String("string"),
			Config: &dataproc.WorkflowTemplatePlacementManagedClusterConfigArgs{
				AutoscalingConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigArgs{
					Policy: pulumi.String("string"),
				},
				EncryptionConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigEncryptionConfigArgs{
					GcePdKmsKeyName: pulumi.String("string"),
				},
				EndpointConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigEndpointConfigArgs{
					EnableHttpPortAccess: pulumi.Bool(false),
					HttpPorts: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				GceClusterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs{
					InternalIpOnly: pulumi.Bool(false),
					Metadata: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
					Network: pulumi.String("string"),
					NodeGroupAffinity: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityArgs{
						NodeGroup: pulumi.String("string"),
					},
					PrivateIpv6GoogleAccess: pulumi.String("string"),
					ReservationAffinity: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityArgs{
						ConsumeReservationType: pulumi.String("string"),
						Key:                    pulumi.String("string"),
						Values: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
					ServiceAccount: pulumi.String("string"),
					ServiceAccountScopes: pulumi.StringArray{
						pulumi.String("string"),
					},
					ShieldedInstanceConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigArgs{
						EnableIntegrityMonitoring: pulumi.Bool(false),
						EnableSecureBoot:          pulumi.Bool(false),
						EnableVtpm:                pulumi.Bool(false),
					},
					Subnetwork: pulumi.String("string"),
					Tags: pulumi.StringArray{
						pulumi.String("string"),
					},
					Zone: pulumi.String("string"),
				},
				GkeClusterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigArgs{
					NamespacedGkeDeploymentTarget: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTargetArgs{
						ClusterNamespace: pulumi.String("string"),
						TargetGkeCluster: pulumi.String("string"),
					},
				},
				InitializationActions: dataproc.WorkflowTemplatePlacementManagedClusterConfigInitializationActionArray{
					&dataproc.WorkflowTemplatePlacementManagedClusterConfigInitializationActionArgs{
						ExecutableFile:   pulumi.String("string"),
						ExecutionTimeout: pulumi.String("string"),
					},
				},
				LifecycleConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigLifecycleConfigArgs{
					AutoDeleteTime: pulumi.String("string"),
					AutoDeleteTtl:  pulumi.String("string"),
					IdleDeleteTtl:  pulumi.String("string"),
					IdleStartTime:  pulumi.String("string"),
				},
				MasterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs{
					Accelerators: dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorArray{
						&dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorArgs{
							AcceleratorCount: pulumi.Int(0),
							AcceleratorType:  pulumi.String("string"),
						},
					},
					DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs{
						BootDiskSizeGb: pulumi.Int(0),
						BootDiskType:   pulumi.String("string"),
						NumLocalSsds:   pulumi.Int(0),
					},
					Image: pulumi.String("string"),
					InstanceNames: pulumi.StringArray{
						pulumi.String("string"),
					},
					IsPreemptible: pulumi.Bool(false),
					MachineType:   pulumi.String("string"),
					ManagedGroupConfigs: dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigArray{
						&dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigArgs{
							InstanceGroupManagerName: pulumi.String("string"),
							InstanceTemplateName:     pulumi.String("string"),
						},
					},
					MinCpuPlatform: pulumi.String("string"),
					NumInstances:   pulumi.Int(0),
					Preemptibility: pulumi.String("string"),
				},
				MetastoreConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMetastoreConfigArgs{
					DataprocMetastoreService: pulumi.String("string"),
				},
				SecondaryWorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs{
					Accelerators: dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorArray{
						&dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorArgs{
							AcceleratorCount: pulumi.Int(0),
							AcceleratorType:  pulumi.String("string"),
						},
					},
					DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigArgs{
						BootDiskSizeGb: pulumi.Int(0),
						BootDiskType:   pulumi.String("string"),
						NumLocalSsds:   pulumi.Int(0),
					},
					Image: pulumi.String("string"),
					InstanceNames: pulumi.StringArray{
						pulumi.String("string"),
					},
					IsPreemptible: pulumi.Bool(false),
					MachineType:   pulumi.String("string"),
					ManagedGroupConfigs: dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigArray{
						&dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigArgs{
							InstanceGroupManagerName: pulumi.String("string"),
							InstanceTemplateName:     pulumi.String("string"),
						},
					},
					MinCpuPlatform: pulumi.String("string"),
					NumInstances:   pulumi.Int(0),
					Preemptibility: pulumi.String("string"),
				},
				SecurityConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecurityConfigArgs{
					KerberosConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigArgs{
						CrossRealmTrustAdminServer:    pulumi.String("string"),
						CrossRealmTrustKdc:            pulumi.String("string"),
						CrossRealmTrustRealm:          pulumi.String("string"),
						CrossRealmTrustSharedPassword: pulumi.String("string"),
						EnableKerberos:                pulumi.Bool(false),
						KdcDbKey:                      pulumi.String("string"),
						KeyPassword:                   pulumi.String("string"),
						Keystore:                      pulumi.String("string"),
						KeystorePassword:              pulumi.String("string"),
						KmsKey:                        pulumi.String("string"),
						Realm:                         pulumi.String("string"),
						RootPrincipalPassword:         pulumi.String("string"),
						TgtLifetimeHours:              pulumi.Int(0),
						Truststore:                    pulumi.String("string"),
						TruststorePassword:            pulumi.String("string"),
					},
				},
				SoftwareConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs{
					ImageVersion: pulumi.String("string"),
					OptionalComponents: pulumi.StringArray{
						pulumi.String("string"),
					},
					Properties: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
				},
				StagingBucket: pulumi.String("string"),
				TempBucket:    pulumi.String("string"),
				WorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs{
					Accelerators: dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorArray{
						&dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorArgs{
							AcceleratorCount: pulumi.Int(0),
							AcceleratorType:  pulumi.String("string"),
						},
					},
					DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs{
						BootDiskSizeGb: pulumi.Int(0),
						BootDiskType:   pulumi.String("string"),
						NumLocalSsds:   pulumi.Int(0),
					},
					Image: pulumi.String("string"),
					InstanceNames: pulumi.StringArray{
						pulumi.String("string"),
					},
					IsPreemptible: pulumi.Bool(false),
					MachineType:   pulumi.String("string"),
					ManagedGroupConfigs: dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigArray{
						&dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigArgs{
							InstanceGroupManagerName: pulumi.String("string"),
							InstanceTemplateName:     pulumi.String("string"),
						},
					},
					MinCpuPlatform: pulumi.String("string"),
					NumInstances:   pulumi.Int(0),
					Preemptibility: pulumi.String("string"),
				},
			},
			Labels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
	},
	DagTimeout: pulumi.String("string"),
	EncryptionConfig: &dataproc.WorkflowTemplateEncryptionConfigArgs{
		KmsKey: pulumi.String("string"),
	},
	Labels: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Name: pulumi.String("string"),
	Parameters: dataproc.WorkflowTemplateParameterArray{
		&dataproc.WorkflowTemplateParameterArgs{
			Fields: pulumi.StringArray{
				pulumi.String("string"),
			},
			Name:        pulumi.String("string"),
			Description: pulumi.String("string"),
			Validation: &dataproc.WorkflowTemplateParameterValidationArgs{
				Regex: &dataproc.WorkflowTemplateParameterValidationRegexArgs{
					Regexes: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
				Values: &dataproc.WorkflowTemplateParameterValidationValuesArgs{
					Values: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
			},
		},
	},
	Project: pulumi.String("string"),
})

Java:

var workflowTemplateResource = new WorkflowTemplate("workflowTemplateResource", WorkflowTemplateArgs.builder()
    .jobs(WorkflowTemplateJobArgs.builder()
        .stepId("string")
        .hadoopJob(WorkflowTemplateJobHadoopJobArgs.builder()
            .archiveUris("string")
            .args("string")
            .fileUris("string")
            .jarFileUris("string")
            .loggingConfig(WorkflowTemplateJobHadoopJobLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .mainClass("string")
            .mainJarFileUri("string")
            .properties(Map.of("string", "string"))
            .build())
        .hiveJob(WorkflowTemplateJobHiveJobArgs.builder()
            .continueOnFailure(false)
            .jarFileUris("string")
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryList(WorkflowTemplateJobHiveJobQueryListArgs.builder()
                .queries("string")
                .build())
            .scriptVariables(Map.of("string", "string"))
            .build())
        .labels(Map.of("string", "string"))
        .pigJob(WorkflowTemplateJobPigJobArgs.builder()
            .continueOnFailure(false)
            .jarFileUris("string")
            .loggingConfig(WorkflowTemplateJobPigJobLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryList(WorkflowTemplateJobPigJobQueryListArgs.builder()
                .queries("string")
                .build())
            .scriptVariables(Map.of("string", "string"))
            .build())
        .prerequisiteStepIds("string")
        .prestoJob(WorkflowTemplateJobPrestoJobArgs.builder()
            .clientTags("string")
            .continueOnFailure(false)
            .loggingConfig(WorkflowTemplateJobPrestoJobLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .outputFormat("string")
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryList(WorkflowTemplateJobPrestoJobQueryListArgs.builder()
                .queries("string")
                .build())
            .build())
        .pysparkJob(WorkflowTemplateJobPysparkJobArgs.builder()
            .mainPythonFileUri("string")
            .archiveUris("string")
            .args("string")
            .fileUris("string")
            .jarFileUris("string")
            .loggingConfig(WorkflowTemplateJobPysparkJobLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .properties(Map.of("string", "string"))
            .pythonFileUris("string")
            .build())
        .scheduling(WorkflowTemplateJobSchedulingArgs.builder()
            .maxFailuresPerHour(0)
            .maxFailuresTotal(0)
            .build())
        .sparkJob(WorkflowTemplateJobSparkJobArgs.builder()
            .archiveUris("string")
            .args("string")
            .fileUris("string")
            .jarFileUris("string")
            .loggingConfig(WorkflowTemplateJobSparkJobLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .mainClass("string")
            .mainJarFileUri("string")
            .properties(Map.of("string", "string"))
            .build())
        .sparkRJob(WorkflowTemplateJobSparkRJobArgs.builder()
            .mainRFileUri("string")
            .archiveUris("string")
            .args("string")
            .fileUris("string")
            .loggingConfig(WorkflowTemplateJobSparkRJobLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .properties(Map.of("string", "string"))
            .build())
        .sparkSqlJob(WorkflowTemplateJobSparkSqlJobArgs.builder()
            .jarFileUris("string")
            .loggingConfig(WorkflowTemplateJobSparkSqlJobLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryList(WorkflowTemplateJobSparkSqlJobQueryListArgs.builder()
                .queries("string")
                .build())
            .scriptVariables(Map.of("string", "string"))
            .build())
        .build())
    .location("string")
    .placement(WorkflowTemplatePlacementArgs.builder()
        .clusterSelector(WorkflowTemplatePlacementClusterSelectorArgs.builder()
            .clusterLabels(Map.of("string", "string"))
            .zone("string")
            .build())
        .managedCluster(WorkflowTemplatePlacementManagedClusterArgs.builder()
            .clusterName("string")
            .config(WorkflowTemplatePlacementManagedClusterConfigArgs.builder()
                .autoscalingConfig(WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigArgs.builder()
                    .policy("string")
                    .build())
                .encryptionConfig(WorkflowTemplatePlacementManagedClusterConfigEncryptionConfigArgs.builder()
                    .gcePdKmsKeyName("string")
                    .build())
                .endpointConfig(WorkflowTemplatePlacementManagedClusterConfigEndpointConfigArgs.builder()
                    .enableHttpPortAccess(false)
                    .httpPorts(Map.of("string", "string"))
                    .build())
                .gceClusterConfig(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs.builder()
                    .internalIpOnly(false)
                    .metadata(Map.of("string", "string"))
                    .network("string")
                    .nodeGroupAffinity(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityArgs.builder()
                        .nodeGroup("string")
                        .build())
                    .privateIpv6GoogleAccess("string")
                    .reservationAffinity(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityArgs.builder()
                        .consumeReservationType("string")
                        .key("string")
                        .values("string")
                        .build())
                    .serviceAccount("string")
                    .serviceAccountScopes("string")
                    .shieldedInstanceConfig(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigArgs.builder()
                        .enableIntegrityMonitoring(false)
                        .enableSecureBoot(false)
                        .enableVtpm(false)
                        .build())
                    .subnetwork("string")
                    .tags("string")
                    .zone("string")
                    .build())
                .gkeClusterConfig(WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigArgs.builder()
                    .namespacedGkeDeploymentTarget(WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTargetArgs.builder()
                        .clusterNamespace("string")
                        .targetGkeCluster("string")
                        .build())
                    .build())
                .initializationActions(WorkflowTemplatePlacementManagedClusterConfigInitializationActionArgs.builder()
                    .executableFile("string")
                    .executionTimeout("string")
                    .build())
                .lifecycleConfig(WorkflowTemplatePlacementManagedClusterConfigLifecycleConfigArgs.builder()
                    .autoDeleteTime("string")
                    .autoDeleteTtl("string")
                    .idleDeleteTtl("string")
                    .idleStartTime("string")
                    .build())
                .masterConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs.builder()
                    .accelerators(WorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorArgs.builder()
                        .acceleratorCount(0)
                        .acceleratorType("string")
                        .build())
                    .diskConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs.builder()
                        .bootDiskSizeGb(0)
                        .bootDiskType("string")
                        .numLocalSsds(0)
                        .build())
                    .image("string")
                    .instanceNames("string")
                    .isPreemptible(false)
                    .machineType("string")
                    .managedGroupConfigs(WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigArgs.builder()
                        .instanceGroupManagerName("string")
                        .instanceTemplateName("string")
                        .build())
                    .minCpuPlatform("string")
                    .numInstances(0)
                    .preemptibility("string")
                    .build())
                .metastoreConfig(WorkflowTemplatePlacementManagedClusterConfigMetastoreConfigArgs.builder()
                    .dataprocMetastoreService("string")
                    .build())
                .secondaryWorkerConfig(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs.builder()
                    .accelerators(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorArgs.builder()
                        .acceleratorCount(0)
                        .acceleratorType("string")
                        .build())
                    .diskConfig(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigArgs.builder()
                        .bootDiskSizeGb(0)
                        .bootDiskType("string")
                        .numLocalSsds(0)
                        .build())
                    .image("string")
                    .instanceNames("string")
                    .isPreemptible(false)
                    .machineType("string")
                    .managedGroupConfigs(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigArgs.builder()
                        .instanceGroupManagerName("string")
                        .instanceTemplateName("string")
                        .build())
                    .minCpuPlatform("string")
                    .numInstances(0)
                    .preemptibility("string")
                    .build())
                .securityConfig(WorkflowTemplatePlacementManagedClusterConfigSecurityConfigArgs.builder()
                    .kerberosConfig(WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigArgs.builder()
                        .crossRealmTrustAdminServer("string")
                        .crossRealmTrustKdc("string")
                        .crossRealmTrustRealm("string")
                        .crossRealmTrustSharedPassword("string")
                        .enableKerberos(false)
                        .kdcDbKey("string")
                        .keyPassword("string")
                        .keystore("string")
                        .keystorePassword("string")
                        .kmsKey("string")
                        .realm("string")
                        .rootPrincipalPassword("string")
                        .tgtLifetimeHours(0)
                        .truststore("string")
                        .truststorePassword("string")
                        .build())
                    .build())
                .softwareConfig(WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs.builder()
                    .imageVersion("string")
                    .optionalComponents("string")
                    .properties(Map.of("string", "string"))
                    .build())
                .stagingBucket("string")
                .tempBucket("string")
                .workerConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs.builder()
                    .accelerators(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorArgs.builder()
                        .acceleratorCount(0)
                        .acceleratorType("string")
                        .build())
                    .diskConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs.builder()
                        .bootDiskSizeGb(0)
                        .bootDiskType("string")
                        .numLocalSsds(0)
                        .build())
                    .image("string")
                    .instanceNames("string")
                    .isPreemptible(false)
                    .machineType("string")
                    .managedGroupConfigs(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigArgs.builder()
                        .instanceGroupManagerName("string")
                        .instanceTemplateName("string")
                        .build())
                    .minCpuPlatform("string")
                    .numInstances(0)
                    .preemptibility("string")
                    .build())
                .build())
            .labels(Map.of("string", "string"))
            .build())
        .build())
    .dagTimeout("string")
    .encryptionConfig(WorkflowTemplateEncryptionConfigArgs.builder()
        .kmsKey("string")
        .build())
    .labels(Map.of("string", "string"))
    .name("string")
    .parameters(WorkflowTemplateParameterArgs.builder()
        .fields("string")
        .name("string")
        .description("string")
        .validation(WorkflowTemplateParameterValidationArgs.builder()
            .regex(WorkflowTemplateParameterValidationRegexArgs.builder()
                .regexes("string")
                .build())
            .values(WorkflowTemplateParameterValidationValuesArgs.builder()
                .values("string")
                .build())
            .build())
        .build())
    .project("string")
    .build());
workflow_template_resource = gcp.dataproc.WorkflowTemplate("workflowTemplateResource",
    jobs=[{
        "step_id": "string",
        "hadoop_job": {
            "archive_uris": ["string"],
            "args": ["string"],
            "file_uris": ["string"],
            "jar_file_uris": ["string"],
            "logging_config": {
                "driver_log_levels": {
                    "string": "string",
                },
            },
            "main_class": "string",
            "main_jar_file_uri": "string",
            "properties": {
                "string": "string",
            },
        },
        "hive_job": {
            "continue_on_failure": False,
            "jar_file_uris": ["string"],
            "properties": {
                "string": "string",
            },
            "query_file_uri": "string",
            "query_list": {
                "queries": ["string"],
            },
            "script_variables": {
                "string": "string",
            },
        },
        "labels": {
            "string": "string",
        },
        "pig_job": {
            "continue_on_failure": False,
            "jar_file_uris": ["string"],
            "logging_config": {
                "driver_log_levels": {
                    "string": "string",
                },
            },
            "properties": {
                "string": "string",
            },
            "query_file_uri": "string",
            "query_list": {
                "queries": ["string"],
            },
            "script_variables": {
                "string": "string",
            },
        },
        "prerequisite_step_ids": ["string"],
        "presto_job": {
            "client_tags": ["string"],
            "continue_on_failure": False,
            "logging_config": {
                "driver_log_levels": {
                    "string": "string",
                },
            },
            "output_format": "string",
            "properties": {
                "string": "string",
            },
            "query_file_uri": "string",
            "query_list": {
                "queries": ["string"],
            },
        },
        "pyspark_job": {
            "main_python_file_uri": "string",
            "archive_uris": ["string"],
            "args": ["string"],
            "file_uris": ["string"],
            "jar_file_uris": ["string"],
            "logging_config": {
                "driver_log_levels": {
                    "string": "string",
                },
            },
            "properties": {
                "string": "string",
            },
            "python_file_uris": ["string"],
        },
        "scheduling": {
            "max_failures_per_hour": 0,
            "max_failures_total": 0,
        },
        "spark_job": {
            "archive_uris": ["string"],
            "args": ["string"],
            "file_uris": ["string"],
            "jar_file_uris": ["string"],
            "logging_config": {
                "driver_log_levels": {
                    "string": "string",
                },
            },
            "main_class": "string",
            "main_jar_file_uri": "string",
            "properties": {
                "string": "string",
            },
        },
        "spark_r_job": {
            "main_r_file_uri": "string",
            "archive_uris": ["string"],
            "args": ["string"],
            "file_uris": ["string"],
            "logging_config": {
                "driver_log_levels": {
                    "string": "string",
                },
            },
            "properties": {
                "string": "string",
            },
        },
        "spark_sql_job": {
            "jar_file_uris": ["string"],
            "logging_config": {
                "driver_log_levels": {
                    "string": "string",
                },
            },
            "properties": {
                "string": "string",
            },
            "query_file_uri": "string",
            "query_list": {
                "queries": ["string"],
            },
            "script_variables": {
                "string": "string",
            },
        },
    }],
    location="string",
    placement={
        "cluster_selector": {
            "cluster_labels": {
                "string": "string",
            },
            "zone": "string",
        },
        "managed_cluster": {
            "cluster_name": "string",
            "config": {
                "autoscaling_config": {
                    "policy": "string",
                },
                "encryption_config": {
                    "gce_pd_kms_key_name": "string",
                },
                "endpoint_config": {
                    "enable_http_port_access": False,
                    "http_ports": {
                        "string": "string",
                    },
                },
                "gce_cluster_config": {
                    "internal_ip_only": False,
                    "metadata": {
                        "string": "string",
                    },
                    "network": "string",
                    "node_group_affinity": {
                        "node_group": "string",
                    },
                    "private_ipv6_google_access": "string",
                    "reservation_affinity": {
                        "consume_reservation_type": "string",
                        "key": "string",
                        "values": ["string"],
                    },
                    "service_account": "string",
                    "service_account_scopes": ["string"],
                    "shielded_instance_config": {
                        "enable_integrity_monitoring": False,
                        "enable_secure_boot": False,
                        "enable_vtpm": False,
                    },
                    "subnetwork": "string",
                    "tags": ["string"],
                    "zone": "string",
                },
                "gke_cluster_config": {
                    "namespaced_gke_deployment_target": {
                        "cluster_namespace": "string",
                        "target_gke_cluster": "string",
                    },
                },
                "initialization_actions": [{
                    "executable_file": "string",
                    "execution_timeout": "string",
                }],
                "lifecycle_config": {
                    "auto_delete_time": "string",
                    "auto_delete_ttl": "string",
                    "idle_delete_ttl": "string",
                    "idle_start_time": "string",
                },
                "master_config": {
                    "accelerators": [{
                        "accelerator_count": 0,
                        "accelerator_type": "string",
                    }],
                    "disk_config": {
                        "boot_disk_size_gb": 0,
                        "boot_disk_type": "string",
                        "num_local_ssds": 0,
                    },
                    "image": "string",
                    "instance_names": ["string"],
                    "is_preemptible": False,
                    "machine_type": "string",
                    "managed_group_configs": [{
                        "instance_group_manager_name": "string",
                        "instance_template_name": "string",
                    }],
                    "min_cpu_platform": "string",
                    "num_instances": 0,
                    "preemptibility": "string",
                },
                "metastore_config": {
                    "dataproc_metastore_service": "string",
                },
                "secondary_worker_config": {
                    "accelerators": [{
                        "accelerator_count": 0,
                        "accelerator_type": "string",
                    }],
                    "disk_config": {
                        "boot_disk_size_gb": 0,
                        "boot_disk_type": "string",
                        "num_local_ssds": 0,
                    },
                    "image": "string",
                    "instance_names": ["string"],
                    "is_preemptible": False,
                    "machine_type": "string",
                    "managed_group_configs": [{
                        "instance_group_manager_name": "string",
                        "instance_template_name": "string",
                    }],
                    "min_cpu_platform": "string",
                    "num_instances": 0,
                    "preemptibility": "string",
                },
                "security_config": {
                    "kerberos_config": {
                        "cross_realm_trust_admin_server": "string",
                        "cross_realm_trust_kdc": "string",
                        "cross_realm_trust_realm": "string",
                        "cross_realm_trust_shared_password": "string",
                        "enable_kerberos": False,
                        "kdc_db_key": "string",
                        "key_password": "string",
                        "keystore": "string",
                        "keystore_password": "string",
                        "kms_key": "string",
                        "realm": "string",
                        "root_principal_password": "string",
                        "tgt_lifetime_hours": 0,
                        "truststore": "string",
                        "truststore_password": "string",
                    },
                },
                "software_config": {
                    "image_version": "string",
                    "optional_components": ["string"],
                    "properties": {
                        "string": "string",
                    },
                },
                "staging_bucket": "string",
                "temp_bucket": "string",
                "worker_config": {
                    "accelerators": [{
                        "accelerator_count": 0,
                        "accelerator_type": "string",
                    }],
                    "disk_config": {
                        "boot_disk_size_gb": 0,
                        "boot_disk_type": "string",
                        "num_local_ssds": 0,
                    },
                    "image": "string",
                    "instance_names": ["string"],
                    "is_preemptible": False,
                    "machine_type": "string",
                    "managed_group_configs": [{
                        "instance_group_manager_name": "string",
                        "instance_template_name": "string",
                    }],
                    "min_cpu_platform": "string",
                    "num_instances": 0,
                    "preemptibility": "string",
                },
            },
            "labels": {
                "string": "string",
            },
        },
    },
    dag_timeout="string",
    encryption_config={
        "kms_key": "string",
    },
    labels={
        "string": "string",
    },
    name="string",
    parameters=[{
        "fields": ["string"],
        "name": "string",
        "description": "string",
        "validation": {
            "regex": {
                "regexes": ["string"],
            },
            "values": {
                "values": ["string"],
            },
        },
    }],
    project="string")
const workflowTemplateResource = new gcp.dataproc.WorkflowTemplate("workflowTemplateResource", {
    jobs: [{
        stepId: "string",
        hadoopJob: {
            archiveUris: ["string"],
            args: ["string"],
            fileUris: ["string"],
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            mainClass: "string",
            mainJarFileUri: "string",
            properties: {
                string: "string",
            },
        },
        hiveJob: {
            continueOnFailure: false,
            jarFileUris: ["string"],
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryList: {
                queries: ["string"],
            },
            scriptVariables: {
                string: "string",
            },
        },
        labels: {
            string: "string",
        },
        pigJob: {
            continueOnFailure: false,
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryList: {
                queries: ["string"],
            },
            scriptVariables: {
                string: "string",
            },
        },
        prerequisiteStepIds: ["string"],
        prestoJob: {
            clientTags: ["string"],
            continueOnFailure: false,
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            outputFormat: "string",
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryList: {
                queries: ["string"],
            },
        },
        pysparkJob: {
            mainPythonFileUri: "string",
            archiveUris: ["string"],
            args: ["string"],
            fileUris: ["string"],
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            properties: {
                string: "string",
            },
            pythonFileUris: ["string"],
        },
        scheduling: {
            maxFailuresPerHour: 0,
            maxFailuresTotal: 0,
        },
        sparkJob: {
            archiveUris: ["string"],
            args: ["string"],
            fileUris: ["string"],
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            mainClass: "string",
            mainJarFileUri: "string",
            properties: {
                string: "string",
            },
        },
        sparkRJob: {
            mainRFileUri: "string",
            archiveUris: ["string"],
            args: ["string"],
            fileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            properties: {
                string: "string",
            },
        },
        sparkSqlJob: {
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryList: {
                queries: ["string"],
            },
            scriptVariables: {
                string: "string",
            },
        },
    }],
    location: "string",
    placement: {
        clusterSelector: {
            clusterLabels: {
                string: "string",
            },
            zone: "string",
        },
        managedCluster: {
            clusterName: "string",
            config: {
                autoscalingConfig: {
                    policy: "string",
                },
                encryptionConfig: {
                    gcePdKmsKeyName: "string",
                },
                endpointConfig: {
                    enableHttpPortAccess: false,
                    httpPorts: {
                        string: "string",
                    },
                },
                gceClusterConfig: {
                    internalIpOnly: false,
                    metadata: {
                        string: "string",
                    },
                    network: "string",
                    nodeGroupAffinity: {
                        nodeGroup: "string",
                    },
                    privateIpv6GoogleAccess: "string",
                    reservationAffinity: {
                        consumeReservationType: "string",
                        key: "string",
                        values: ["string"],
                    },
                    serviceAccount: "string",
                    serviceAccountScopes: ["string"],
                    shieldedInstanceConfig: {
                        enableIntegrityMonitoring: false,
                        enableSecureBoot: false,
                        enableVtpm: false,
                    },
                    subnetwork: "string",
                    tags: ["string"],
                    zone: "string",
                },
                gkeClusterConfig: {
                    namespacedGkeDeploymentTarget: {
                        clusterNamespace: "string",
                        targetGkeCluster: "string",
                    },
                },
                initializationActions: [{
                    executableFile: "string",
                    executionTimeout: "string",
                }],
                lifecycleConfig: {
                    autoDeleteTime: "string",
                    autoDeleteTtl: "string",
                    idleDeleteTtl: "string",
                    idleStartTime: "string",
                },
                masterConfig: {
                    accelerators: [{
                        acceleratorCount: 0,
                        acceleratorType: "string",
                    }],
                    diskConfig: {
                        bootDiskSizeGb: 0,
                        bootDiskType: "string",
                        numLocalSsds: 0,
                    },
                    image: "string",
                    instanceNames: ["string"],
                    isPreemptible: false,
                    machineType: "string",
                    managedGroupConfigs: [{
                        instanceGroupManagerName: "string",
                        instanceTemplateName: "string",
                    }],
                    minCpuPlatform: "string",
                    numInstances: 0,
                    preemptibility: "string",
                },
                metastoreConfig: {
                    dataprocMetastoreService: "string",
                },
                secondaryWorkerConfig: {
                    accelerators: [{
                        acceleratorCount: 0,
                        acceleratorType: "string",
                    }],
                    diskConfig: {
                        bootDiskSizeGb: 0,
                        bootDiskType: "string",
                        numLocalSsds: 0,
                    },
                    image: "string",
                    instanceNames: ["string"],
                    isPreemptible: false,
                    machineType: "string",
                    managedGroupConfigs: [{
                        instanceGroupManagerName: "string",
                        instanceTemplateName: "string",
                    }],
                    minCpuPlatform: "string",
                    numInstances: 0,
                    preemptibility: "string",
                },
                securityConfig: {
                    kerberosConfig: {
                        crossRealmTrustAdminServer: "string",
                        crossRealmTrustKdc: "string",
                        crossRealmTrustRealm: "string",
                        crossRealmTrustSharedPassword: "string",
                        enableKerberos: false,
                        kdcDbKey: "string",
                        keyPassword: "string",
                        keystore: "string",
                        keystorePassword: "string",
                        kmsKey: "string",
                        realm: "string",
                        rootPrincipalPassword: "string",
                        tgtLifetimeHours: 0,
                        truststore: "string",
                        truststorePassword: "string",
                    },
                },
                softwareConfig: {
                    imageVersion: "string",
                    optionalComponents: ["string"],
                    properties: {
                        string: "string",
                    },
                },
                stagingBucket: "string",
                tempBucket: "string",
                workerConfig: {
                    accelerators: [{
                        acceleratorCount: 0,
                        acceleratorType: "string",
                    }],
                    diskConfig: {
                        bootDiskSizeGb: 0,
                        bootDiskType: "string",
                        numLocalSsds: 0,
                    },
                    image: "string",
                    instanceNames: ["string"],
                    isPreemptible: false,
                    machineType: "string",
                    managedGroupConfigs: [{
                        instanceGroupManagerName: "string",
                        instanceTemplateName: "string",
                    }],
                    minCpuPlatform: "string",
                    numInstances: 0,
                    preemptibility: "string",
                },
            },
            labels: {
                string: "string",
            },
        },
    },
    dagTimeout: "string",
    encryptionConfig: {
        kmsKey: "string",
    },
    labels: {
        string: "string",
    },
    name: "string",
    parameters: [{
        fields: ["string"],
        name: "string",
        description: "string",
        validation: {
            regex: {
                regexes: ["string"],
            },
            values: {
                values: ["string"],
            },
        },
    }],
    project: "string",
});
type: gcp:dataproc:WorkflowTemplate
properties:
    dagTimeout: string
    encryptionConfig:
        kmsKey: string
    jobs:
        - hadoopJob:
            archiveUris:
                - string
            args:
                - string
            fileUris:
                - string
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            mainClass: string
            mainJarFileUri: string
            properties:
                string: string
          hiveJob:
            continueOnFailure: false
            jarFileUris:
                - string
            properties:
                string: string
            queryFileUri: string
            queryList:
                queries:
                    - string
            scriptVariables:
                string: string
          labels:
            string: string
          pigJob:
            continueOnFailure: false
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            properties:
                string: string
            queryFileUri: string
            queryList:
                queries:
                    - string
            scriptVariables:
                string: string
          prerequisiteStepIds:
            - string
          prestoJob:
            clientTags:
                - string
            continueOnFailure: false
            loggingConfig:
                driverLogLevels:
                    string: string
            outputFormat: string
            properties:
                string: string
            queryFileUri: string
            queryList:
                queries:
                    - string
          pysparkJob:
            archiveUris:
                - string
            args:
                - string
            fileUris:
                - string
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            mainPythonFileUri: string
            properties:
                string: string
            pythonFileUris:
                - string
          scheduling:
            maxFailuresPerHour: 0
            maxFailuresTotal: 0
          sparkJob:
            archiveUris:
                - string
            args:
                - string
            fileUris:
                - string
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            mainClass: string
            mainJarFileUri: string
            properties:
                string: string
          sparkRJob:
            archiveUris:
                - string
            args:
                - string
            fileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            mainRFileUri: string
            properties:
                string: string
          sparkSqlJob:
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            properties:
                string: string
            queryFileUri: string
            queryList:
                queries:
                    - string
            scriptVariables:
                string: string
          stepId: string
    labels:
        string: string
    location: string
    name: string
    parameters:
        - description: string
          fields:
            - string
          name: string
          validation:
            regex:
                regexes:
                    - string
            values:
                values:
                    - string
    placement:
        clusterSelector:
            clusterLabels:
                string: string
            zone: string
        managedCluster:
            clusterName: string
            config:
                autoscalingConfig:
                    policy: string
                encryptionConfig:
                    gcePdKmsKeyName: string
                endpointConfig:
                    enableHttpPortAccess: false
                    httpPorts:
                        string: string
                gceClusterConfig:
                    internalIpOnly: false
                    metadata:
                        string: string
                    network: string
                    nodeGroupAffinity:
                        nodeGroup: string
                    privateIpv6GoogleAccess: string
                    reservationAffinity:
                        consumeReservationType: string
                        key: string
                        values:
                            - string
                    serviceAccount: string
                    serviceAccountScopes:
                        - string
                    shieldedInstanceConfig:
                        enableIntegrityMonitoring: false
                        enableSecureBoot: false
                        enableVtpm: false
                    subnetwork: string
                    tags:
                        - string
                    zone: string
                gkeClusterConfig:
                    namespacedGkeDeploymentTarget:
                        clusterNamespace: string
                        targetGkeCluster: string
                initializationActions:
                    - executableFile: string
                      executionTimeout: string
                lifecycleConfig:
                    autoDeleteTime: string
                    autoDeleteTtl: string
                    idleDeleteTtl: string
                    idleStartTime: string
                masterConfig:
                    accelerators:
                        - acceleratorCount: 0
                          acceleratorType: string
                    diskConfig:
                        bootDiskSizeGb: 0
                        bootDiskType: string
                        numLocalSsds: 0
                    image: string
                    instanceNames:
                        - string
                    isPreemptible: false
                    machineType: string
                    managedGroupConfigs:
                        - instanceGroupManagerName: string
                          instanceTemplateName: string
                    minCpuPlatform: string
                    numInstances: 0
                    preemptibility: string
                metastoreConfig:
                    dataprocMetastoreService: string
                secondaryWorkerConfig:
                    accelerators:
                        - acceleratorCount: 0
                          acceleratorType: string
                    diskConfig:
                        bootDiskSizeGb: 0
                        bootDiskType: string
                        numLocalSsds: 0
                    image: string
                    instanceNames:
                        - string
                    isPreemptible: false
                    machineType: string
                    managedGroupConfigs:
                        - instanceGroupManagerName: string
                          instanceTemplateName: string
                    minCpuPlatform: string
                    numInstances: 0
                    preemptibility: string
                securityConfig:
                    kerberosConfig:
                        crossRealmTrustAdminServer: string
                        crossRealmTrustKdc: string
                        crossRealmTrustRealm: string
                        crossRealmTrustSharedPassword: string
                        enableKerberos: false
                        kdcDbKey: string
                        keyPassword: string
                        keystore: string
                        keystorePassword: string
                        kmsKey: string
                        realm: string
                        rootPrincipalPassword: string
                        tgtLifetimeHours: 0
                        truststore: string
                        truststorePassword: string
                softwareConfig:
                    imageVersion: string
                    optionalComponents:
                        - string
                    properties:
                        string: string
                stagingBucket: string
                tempBucket: string
                workerConfig:
                    accelerators:
                        - acceleratorCount: 0
                          acceleratorType: string
                    diskConfig:
                        bootDiskSizeGb: 0
                        bootDiskType: string
                        numLocalSsds: 0
                    image: string
                    instanceNames:
                        - string
                    isPreemptible: false
                    machineType: string
                    managedGroupConfigs:
                        - instanceGroupManagerName: string
                          instanceTemplateName: string
                    minCpuPlatform: string
                    numInstances: 0
                    preemptibility: string
            labels:
                string: string
    project: string

WorkflowTemplate Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
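
For example, the placement input can be written either way; this is a minimal sketch, and the cluster label value is illustrative:

import pulumi_gcp as gcp

# Typed argument-class form, using the generated *Args input classes.
placement_as_args = gcp.dataproc.WorkflowTemplatePlacementArgs(
    cluster_selector=gcp.dataproc.WorkflowTemplatePlacementClusterSelectorArgs(
        cluster_labels={"goog-dataproc-cluster-name": "my-cluster"},
    ),
)

# Equivalent dictionary-literal form with snake_case keys.
placement_as_dict = {
    "cluster_selector": {
        "cluster_labels": {"goog-dataproc-cluster-name": "my-cluster"},
    },
}

Either value can be passed as the placement argument of gcp.dataproc.WorkflowTemplate.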

The WorkflowTemplate resource accepts the following input properties:

C#

Jobs
This property is required.
Changes to this property will trigger replacement.
List<WorkflowTemplateJob>
Required. The Directed Acyclic Graph of Jobs to submit.
Location
This property is required.
Changes to this property will trigger replacement.
string
The location for the resource
Placement
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacement
Required. WorkflowTemplate scheduling information.
DagTimeout Changes to this property will trigger replacement. string
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
EncryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfig
Optional. The encryption configuration for the workflow template.
Labels Dictionary<string, string>
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
Name Changes to this property will trigger replacement. string
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
Parameters Changes to this property will trigger replacement. List<WorkflowTemplateParameter>
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
Project Changes to this property will trigger replacement. string
The project for the resource
Version Changes to this property will trigger replacement. int
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

Go

Jobs
This property is required.
Changes to this property will trigger replacement.
[]WorkflowTemplateJobArgs
Required. The Directed Acyclic Graph of Jobs to submit.
Location
This property is required.
Changes to this property will trigger replacement.
string
The location for the resource
Placement
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacementArgs
Required. WorkflowTemplate scheduling information.
DagTimeout Changes to this property will trigger replacement. string
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
EncryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfigArgs
Optional. The encryption configuration for the workflow template.
Labels map[string]string
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
Name Changes to this property will trigger replacement. string
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
Parameters Changes to this property will trigger replacement. []WorkflowTemplateParameterArgs
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
Project Changes to this property will trigger replacement. string
The project for the resource
Version Changes to this property will trigger replacement. int
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

Java

jobs
This property is required.
Changes to this property will trigger replacement.
List<WorkflowTemplateJob>
Required. The Directed Acyclic Graph of Jobs to submit.
location
This property is required.
Changes to this property will trigger replacement.
String
The location for the resource
placement
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacement
Required. WorkflowTemplate scheduling information.
dagTimeout Changes to this property will trigger replacement. String
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
encryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfig
Optional. The encryption configuration for the workflow template.
labels Map<String,String>
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
name Changes to this property will trigger replacement. String
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. List<WorkflowTemplateParameter>
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
project Changes to this property will trigger replacement. String
The project for the resource
version Changes to this property will trigger replacement. Integer
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

TypeScript

jobs
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplateJob[]
Required. The Directed Acyclic Graph of Jobs to submit.
location
This property is required.
Changes to this property will trigger replacement.
string
The location for the resource
placement
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacement
Required. WorkflowTemplate scheduling information.
dagTimeout Changes to this property will trigger replacement. string
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
encryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfig
Optional. The encryption configuration for the workflow template.
labels {[key: string]: string}
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
name Changes to this property will trigger replacement. string
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. WorkflowTemplateParameter[]
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
project Changes to this property will trigger replacement. string
The project for the resource
version Changes to this property will trigger replacement. number
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

Python

jobs
This property is required.
Changes to this property will trigger replacement.
Sequence[WorkflowTemplateJobArgs]
Required. The Directed Acyclic Graph of Jobs to submit.
location
This property is required.
Changes to this property will trigger replacement.
str
The location for the resource
placement
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacementArgs
Required. WorkflowTemplate scheduling information.
dag_timeout Changes to this property will trigger replacement. str
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
encryption_config Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfigArgs
Optional. The encryption configuration for the workflow template.
labels Mapping[str, str]
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
name Changes to this property will trigger replacement. str
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. Sequence[WorkflowTemplateParameterArgs]
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
project Changes to this property will trigger replacement. str
The project for the resource
version Changes to this property will trigger replacement. int
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

jobs
This property is required.
Changes to this property will trigger replacement.
List<Property Map>
Required. The Directed Acyclic Graph of Jobs to submit.
location
This property is required.
Changes to this property will trigger replacement.
String
The location for the resource
placement
This property is required.
Changes to this property will trigger replacement.
Property Map
Required. WorkflowTemplate scheduling information.
dagTimeout Changes to this property will trigger replacement. String
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
encryptionConfig Changes to this property will trigger replacement. Property Map
Optional. The encryption configuration for the workflow template.
labels Map<String>
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
name Changes to this property will trigger replacement. String
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. List<Property Map>
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
project Changes to this property will trigger replacement. String
The project for the resource
version Changes to this property will trigger replacement. Number
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.
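The arguments above can be combined in a single declaration. The following TypeScript sketch sets a DAG timeout, labels, and a template parameter alongside the required placement and jobs; all names, machine types, and the parameterized field path are illustrative, not requirements.

import * as gcp from "@pulumi/gcp";

// Illustrative values only: resource names, machine types, and the
// parameterized field path are placeholders.
const parameterized = new gcp.dataproc.WorkflowTemplate("parameterized", {
    name: "parameterized-template",
    location: "us-central1",
    dagTimeout: "1800s",                       // cancel any remaining jobs after 30 minutes
    labels: { team: "analytics", env: "dev" }, // propagated to jobs and clusters
    parameters: [{
        name: "ZONE",
        fields: ["placement.managedCluster.config.gceClusterConfig.zone"],
    }],
    placement: {
        managedCluster: {
            clusterName: "parameterized-cluster",
            config: {
                gceClusterConfig: { zone: "us-central1-a" },
                masterConfig: { numInstances: 1, machineType: "n1-standard-1" },
                workerConfig: { numInstances: 2, machineType: "n1-standard-1" },
            },
        },
    },
    jobs: [{
        stepId: "only-step",
        sparkJob: { mainClass: "com.example.Main" },
    }],
});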

Outputs

All input properties are implicitly available as output properties. Additionally, the WorkflowTemplate resource produces the following output properties:

CreateTime string
Output only. The time template was created.
EffectiveLabels Dictionary<string, string>
Id string
The provider-assigned unique ID for this managed resource.
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
UpdateTime string
Output only. The time template was last updated.
CreateTime string
Output only. The time template was created.
EffectiveLabels map[string]string
Id string
The provider-assigned unique ID for this managed resource.
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
UpdateTime string
Output only. The time template was last updated.
createTime String
Output only. The time template was created.
effectiveLabels Map<String,String>
id String
The provider-assigned unique ID for this managed resource.
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
updateTime String
Output only. The time template was last updated.
createTime string
Output only. The time template was created.
effectiveLabels {[key: string]: string}
id string
The provider-assigned unique ID for this managed resource.
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
updateTime string
Output only. The time template was last updated.
create_time str
Output only. The time template was created.
effective_labels Mapping[str, str]
id str
The provider-assigned unique ID for this managed resource.
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
update_time str
Output only. The time template was last updated.
createTime String
Output only. The time template was created.
effectiveLabels Map<String>
id String
The provider-assigned unique ID for this managed resource.
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
updateTime String
Output only. The time template was last updated.
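Continuing the hypothetical `parameterized` template from the sketch above the Outputs section, the output-only properties listed here can be exported from the program or fed into other resources.

// `parameterized` is the hypothetical template from the earlier sketch.
export const templateId = parameterized.id;
export const templateCreateTime = parameterized.createTime;
export const templateEffectiveLabels = parameterized.effectiveLabels;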

Look up Existing WorkflowTemplate Resource

Get an existing WorkflowTemplate resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: WorkflowTemplateState, opts?: CustomResourceOptions): WorkflowTemplate
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        create_time: Optional[str] = None,
        dag_timeout: Optional[str] = None,
        effective_labels: Optional[Mapping[str, str]] = None,
        encryption_config: Optional[WorkflowTemplateEncryptionConfigArgs] = None,
        jobs: Optional[Sequence[WorkflowTemplateJobArgs]] = None,
        labels: Optional[Mapping[str, str]] = None,
        location: Optional[str] = None,
        name: Optional[str] = None,
        parameters: Optional[Sequence[WorkflowTemplateParameterArgs]] = None,
        placement: Optional[WorkflowTemplatePlacementArgs] = None,
        project: Optional[str] = None,
        pulumi_labels: Optional[Mapping[str, str]] = None,
        update_time: Optional[str] = None,
        version: Optional[int] = None) -> WorkflowTemplate
func GetWorkflowTemplate(ctx *Context, name string, id IDInput, state *WorkflowTemplateState, opts ...ResourceOption) (*WorkflowTemplate, error)
public static WorkflowTemplate Get(string name, Input<string> id, WorkflowTemplateState? state, CustomResourceOptions? opts = null)
public static WorkflowTemplate get(String name, Output<String> id, WorkflowTemplateState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:dataproc:WorkflowTemplate
    get:
      id: ${id}
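For example, a TypeScript lookup might look like the following sketch; the ID string is a placeholder and should be replaced with the template's full resource name for your project and region.

import * as gcp from "@pulumi/gcp";

// Placeholder ID: use the template's full resource name.
const existing = gcp.dataproc.WorkflowTemplate.get(
    "existing-template",
    "projects/my-project/regions/us-central1/workflowTemplates/template-example",
);

export const existingCreateTime = existing.createTime;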
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
CreateTime string
Output only. The time template was created.
DagTimeout Changes to this property will trigger replacement. string
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
EffectiveLabels Changes to this property will trigger replacement. Dictionary<string, string>
EncryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfig
Optional. The encryption configuration for the workflow template.
Jobs Changes to this property will trigger replacement. List<WorkflowTemplateJob>
Required. The Directed Acyclic Graph of Jobs to submit.
Labels Dictionary<string, string>
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
Location Changes to this property will trigger replacement. string
The location for the resource
Name Changes to this property will trigger replacement. string
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
Parameters Changes to this property will trigger replacement. List<WorkflowTemplateParameter>
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
Placement Changes to this property will trigger replacement. WorkflowTemplatePlacement
Required. WorkflowTemplate scheduling information.
Project Changes to this property will trigger replacement. string
The project for the resource
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
UpdateTime string
Output only. The time template was last updated.
Version Changes to this property will trigger replacement. int
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

CreateTime string
Output only. The time template was created.
DagTimeout Changes to this property will trigger replacement. string
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
EffectiveLabels Changes to this property will trigger replacement. map[string]string
EncryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfigArgs
Optional. The encryption configuration for the workflow template.
Jobs Changes to this property will trigger replacement. []WorkflowTemplateJobArgs
Required. The Directed Acyclic Graph of Jobs to submit.
Labels map[string]string
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
Location Changes to this property will trigger replacement. string
The location for the resource
Name Changes to this property will trigger replacement. string
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
Parameters Changes to this property will trigger replacement. []WorkflowTemplateParameterArgs
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
Placement Changes to this property will trigger replacement. WorkflowTemplatePlacementArgs
Required. WorkflowTemplate scheduling information.
Project Changes to this property will trigger replacement. string
The project for the resource
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
UpdateTime string
Output only. The time template was last updated.
Version Changes to this property will trigger replacement. int
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

createTime String
Output only. The time template was created.
dagTimeout Changes to this property will trigger replacement. String
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
effectiveLabels Changes to this property will trigger replacement. Map<String,String>
encryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfig
Optional. The encryption configuration for the workflow template.
jobs Changes to this property will trigger replacement. List<WorkflowTemplateJob>
Required. The Directed Acyclic Graph of Jobs to submit.
labels Map<String,String>
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
location Changes to this property will trigger replacement. String
The location for the resource
name Changes to this property will trigger replacement. String
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. List<WorkflowTemplateParameter>
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
placement Changes to this property will trigger replacement. WorkflowTemplatePlacement
Required. WorkflowTemplate scheduling information.
project Changes to this property will trigger replacement. String
The project for the resource
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
updateTime String
Output only. The time template was last updated.
version Changes to this property will trigger replacement. Integer
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

createTime string
Output only. The time template was created.
dagTimeout Changes to this property will trigger replacement. string
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
effectiveLabels Changes to this property will trigger replacement. {[key: string]: string}
encryptionConfig Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfig
Optional. The encryption configuration for the workflow template.
jobs Changes to this property will trigger replacement. WorkflowTemplateJob[]
Required. The Directed Acyclic Graph of Jobs to submit.
labels {[key: string]: string}
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
location Changes to this property will trigger replacement. string
The location for the resource
name Changes to this property will trigger replacement. string
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. WorkflowTemplateParameter[]
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
placement Changes to this property will trigger replacement. WorkflowTemplatePlacement
Required. WorkflowTemplate scheduling information.
project Changes to this property will trigger replacement. string
The project for the resource
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
updateTime string
Output only. The time template was last updated.
version Changes to this property will trigger replacement. number
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

create_time str
Output only. The time template was created.
dag_timeout Changes to this property will trigger replacement. str
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
effective_labels Changes to this property will trigger replacement. Mapping[str, str]
encryption_config Changes to this property will trigger replacement. WorkflowTemplateEncryptionConfigArgs
Optional. The encryption configuration for the workflow template.
jobs Changes to this property will trigger replacement. Sequence[WorkflowTemplateJobArgs]
Required. The Directed Acyclic Graph of Jobs to submit.
labels Mapping[str, str]
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
location Changes to this property will trigger replacement. str
The location for the resource
name Changes to this property will trigger replacement. str
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. Sequence[WorkflowTemplateParameterArgs]
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
placement Changes to this property will trigger replacement. WorkflowTemplatePlacementArgs
Required. WorkflowTemplate scheduling information.
project Changes to this property will trigger replacement. str
The project for the resource
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
update_time str
Output only. The time template was last updated.
version Changes to this property will trigger replacement. int
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

createTime String
Output only. The time template was created.
dagTimeout Changes to this property will trigger replacement. String
Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
effectiveLabels Changes to this property will trigger replacement. Map<String>
encryptionConfig Changes to this property will trigger replacement. Property Map
Optional. The encryption configuration for the workflow template.
jobs Changes to this property will trigger replacement. List<Property Map>
Required. The Directed Acyclic Graph of Jobs to submit.
labels Map<String>
Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035. No more than 32 labels can be associated with a template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
location Changes to this property will trigger replacement. String
The location for the resource
name Changes to this property will trigger replacement. String
Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
parameters Changes to this property will trigger replacement. List<Property Map>
Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
placement Changes to this property will trigger replacement. Property Map
Required. WorkflowTemplate scheduling information.
project Changes to this property will trigger replacement. String
The project for the resource
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
updateTime String
Output only. The time template was last updated.
version Changes to this property will trigger replacement. Number
Output only. The current version of this workflow template.

Deprecated: version is not useful as a configurable field, and will be removed in the future.

Supporting Types

WorkflowTemplateEncryptionConfig
, WorkflowTemplateEncryptionConfigArgs

KmsKey Changes to this property will trigger replacement. string
Optional. The Cloud KMS key name to use for encryption.
KmsKey Changes to this property will trigger replacement. string
Optional. The Cloud KMS key name to use for encryption.
kmsKey Changes to this property will trigger replacement. String
Optional. The Cloud KMS key name to use for encryption.
kmsKey Changes to this property will trigger replacement. string
Optional. The Cloud KMS key name to use for encryption.
kms_key Changes to this property will trigger replacement. str
Optional. The Cloud KMS key name to use for encryption.
kmsKey Changes to this property will trigger replacement. String
Optional. The Cloud KMS key name to use for encryption.
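As a hedged sketch, the encryption config is attached when the template is created; the KMS key name below is a placeholder, and the key must be usable by the Dataproc service agent.

import * as gcp from "@pulumi/gcp";

// Placeholder key name; the Dataproc service agent typically needs the
// Cloud KMS CryptoKey Encrypter/Decrypter role on this key.
const kmsKeyName = "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key";

const encrypted = new gcp.dataproc.WorkflowTemplate("encrypted", {
    name: "encrypted-template",
    location: "us-central1",
    encryptionConfig: { kmsKey: kmsKeyName },
    placement: {
        managedCluster: {
            clusterName: "encrypted-cluster",
            config: {
                gceClusterConfig: { zone: "us-central1-a" },
                masterConfig: { numInstances: 1, machineType: "n1-standard-1" },
            },
        },
    },
    jobs: [{ stepId: "noop", sparkJob: { mainClass: "com.example.Main" } }],
});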

WorkflowTemplateJob
, WorkflowTemplateJobArgs

StepId
This property is required.
Changes to this property will trigger replacement.
string
Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job goog-dataproc-workflow-step-id label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
HadoopJob Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJob
Job is a Hadoop job.
HiveJob Changes to this property will trigger replacement. WorkflowTemplateJobHiveJob
Job is a Hive job.
Labels Changes to this property will trigger replacement. Dictionary<string, string>
The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given job.
PigJob Changes to this property will trigger replacement. WorkflowTemplateJobPigJob
Job is a Pig job.
PrerequisiteStepIds Changes to this property will trigger replacement. List<string>
The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
PrestoJob Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJob
Job is a Presto job.
PysparkJob Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJob
Job is a PySpark job.
Scheduling Changes to this property will trigger replacement. WorkflowTemplateJobScheduling
Job scheduling configuration.
SparkJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkJob
Job is a Spark job.
SparkRJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJob
Job is a SparkR job.
SparkSqlJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJob
Job is a SparkSql job.
StepId
This property is required.
Changes to this property will trigger replacement.
string
Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job goog-dataproc-workflow-step-id label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
HadoopJob Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJob
Job is a Hadoop job.
HiveJob Changes to this property will trigger replacement. WorkflowTemplateJobHiveJob
Job is a Hive job.
Labels Changes to this property will trigger replacement. map[string]string
The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given job.
PigJob Changes to this property will trigger replacement. WorkflowTemplateJobPigJob
Job is a Pig job.
PrerequisiteStepIds Changes to this property will trigger replacement. []string
The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
PrestoJob Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJob
Job is a Presto job.
PysparkJob Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJob
Job is a PySpark job.
Scheduling Changes to this property will trigger replacement. WorkflowTemplateJobScheduling
Job scheduling configuration.
SparkJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkJob
Job is a Spark job.
SparkRJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJob
Job is a SparkR job.
SparkSqlJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJob
Job is a SparkSql job.
stepId
This property is required.
Changes to this property will trigger replacement.
String
Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job goog-dataproc-workflow-step-id label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
hadoopJob Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJob
Job is a Hadoop job.
hiveJob Changes to this property will trigger replacement. WorkflowTemplateJobHiveJob
Job is a Hive job.
labels Changes to this property will trigger replacement. Map<String,String>
The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given job.
pigJob Changes to this property will trigger replacement. WorkflowTemplateJobPigJob
Job is a Pig job.
prerequisiteStepIds Changes to this property will trigger replacement. List<String>
The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
prestoJob Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJob
Job is a Presto job.
pysparkJob Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJob
Job is a PySpark job.
scheduling Changes to this property will trigger replacement. WorkflowTemplateJobScheduling
Job scheduling configuration.
sparkJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkJob
Job is a Spark job.
sparkRJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJob
Job is a SparkR job.
sparkSqlJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJob
Job is a SparkSql job.
stepId
This property is required.
Changes to this property will trigger replacement.
string
Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job goog-dataproc-workflow-step-id label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
hadoopJob Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJob
Job is a Hadoop job.
hiveJob Changes to this property will trigger replacement. WorkflowTemplateJobHiveJob
Job is a Hive job.
labels Changes to this property will trigger replacement. {[key: string]: string}
The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given job.
pigJob Changes to this property will trigger replacement. WorkflowTemplateJobPigJob
Job is a Pig job.
prerequisiteStepIds Changes to this property will trigger replacement. string[]
The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
prestoJob Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJob
Job is a Presto job.
pysparkJob Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJob
Job is a PySpark job.
scheduling Changes to this property will trigger replacement. WorkflowTemplateJobScheduling
Job scheduling configuration.
sparkJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkJob
Job is a Spark job.
sparkRJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJob
Job is a SparkR job.
sparkSqlJob Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJob
Job is a SparkSql job.
step_id
This property is required.
Changes to this property will trigger replacement.
str
Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job goog-dataproc-workflow-step-id label, and in the prerequisite_step_ids field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
hadoop_job Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJob
Job is a Hadoop job.
hive_job Changes to this property will trigger replacement. WorkflowTemplateJobHiveJob
Job is a Hive job.
labels Changes to this property will trigger replacement. Mapping[str, str]
The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given job.
pig_job Changes to this property will trigger replacement. WorkflowTemplateJobPigJob
Job is a Pig job.
prerequisite_step_ids Changes to this property will trigger replacement. Sequence[str]
The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
presto_job Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJob
Job is a Presto job.
pyspark_job Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJob
Job is a PySpark job.
scheduling Changes to this property will trigger replacement. WorkflowTemplateJobScheduling
Job scheduling configuration.
spark_job Changes to this property will trigger replacement. WorkflowTemplateJobSparkJob
Job is a Spark job.
spark_r_job Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJob
Job is a SparkR job.
spark_sql_job Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJob
Job is a SparkSql job.
stepId
This property is required.
Changes to this property will trigger replacement.
String
Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job goog-dataproc-workflow-step-id label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
hadoopJob Changes to this property will trigger replacement. Property Map
Job is a Hadoop job.
hiveJob Changes to this property will trigger replacement. Property Map
Job is a Hive job.
labels Changes to this property will trigger replacement. Map<String>
The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the regular expression [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given job.
pigJob Changes to this property will trigger replacement. Property Map
Job is a Pig job.
prerequisiteStepIds Changes to this property will trigger replacement. List<String>
The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
prestoJob Changes to this property will trigger replacement. Property Map
Job is a Presto job.
pysparkJob Changes to this property will trigger replacement. Property Map
Job is a PySpark job.
scheduling Changes to this property will trigger replacement. Property Map
Job scheduling configuration.
sparkJob Changes to this property will trigger replacement. Property Map
Job is a Spark job.
sparkRJob Changes to this property will trigger replacement. Property Map
Job is a SparkR job.
sparkSqlJob Changes to this property will trigger replacement. Property Map
Job is a SparkSql job.
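A small sketch of the job shape described above: two steps forming a DAG, where the second step lists the first in prerequisiteStepIds. The class name, query URI, and labels are placeholders; the array would be passed as the jobs argument of a WorkflowTemplate.

// Placeholder values throughout; this array would be supplied as `jobs`.
const dagJobs = [
    {
        stepId: "prepare",
        labels: { stage: "prep" },
        sparkJob: { mainClass: "com.example.Prepare" },
    },
    {
        stepId: "report",
        prerequisiteStepIds: ["prepare"],   // runs only after "prepare" completes
        hiveJob: { queryFileUri: "gs://my-bucket/queries/report.hql" },
    },
];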

WorkflowTemplateJobHadoopJob
, WorkflowTemplateJobHadoopJobArgs

ArchiveUris Changes to this property will trigger replacement. List<string>
HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
Args Changes to this property will trigger replacement. List<string>
The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. List<string>
HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
JarFileUris Changes to this property will trigger replacement. List<string>
Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJobLoggingConfig
The runtime log config for job execution.
MainClass Changes to this property will trigger replacement. string
The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris.
MainJarFileUri Changes to this property will trigger replacement. string
The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
ArchiveUris Changes to this property will trigger replacement. []string
HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
Args Changes to this property will trigger replacement. []string
The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. []string
HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
JarFileUris Changes to this property will trigger replacement. []string
Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJobLoggingConfig
The runtime log config for job execution.
MainClass Changes to this property will trigger replacement. string
The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris.
MainJarFileUri Changes to this property will trigger replacement. string
The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. List<String>
Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJobLoggingConfig
The runtime log config for job execution.
mainClass Changes to this property will trigger replacement. String
The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris.
mainJarFileUri Changes to this property will trigger replacement. String
The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
archiveUris Changes to this property will trigger replacement. string[]
HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
args Changes to this property will trigger replacement. string[]
The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. string[]
HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. string[]
Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJobLoggingConfig
The runtime log config for job execution.
mainClass Changes to this property will trigger replacement. string
The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris.
mainJarFileUri Changes to this property will trigger replacement. string
The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
archive_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
args Changes to this property will trigger replacement. Sequence[str]
The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
jar_file_uris Changes to this property will trigger replacement. Sequence[str]
Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
logging_config Changes to this property will trigger replacement. WorkflowTemplateJobHadoopJobLoggingConfig
The runtime log config for job execution.
main_class Changes to this property will trigger replacement. str
The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris.
main_jar_file_uri Changes to this property will trigger replacement. str
The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. List<String>
Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
loggingConfig Changes to this property will trigger replacement. Property Map
The runtime log config for job execution.
mainClass Changes to this property will trigger replacement. String
The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris.
mainJarFileUri Changes to this property will trigger replacement. String
The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
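A hedged sketch of a single Hadoop step using the fields above; the jar path, bucket names, and property value are placeholders, and loggingConfig uses the driverLogLevels map documented in the next section.

// Placeholder jar, buckets, and property value.
const hadoopStep = {
    stepId: "wordcount",
    hadoopJob: {
        mainJarFileUri: "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
        args: ["wordcount", "gs://my-bucket/input/", "gs://my-bucket/output/"],
        properties: { "mapreduce.job.maps": "4" },
        loggingConfig: { driverLogLevels: { root: "INFO", "org.apache": "DEBUG" } },
    },
};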

WorkflowTemplateJobHadoopJobLoggingConfig
, WorkflowTemplateJobHadoopJobLoggingConfigArgs

DriverLogLevels Changes to this property will trigger replacement. Dictionary<string, string>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
DriverLogLevels Changes to this property will trigger replacement. map[string]string
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String,String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. {[key: string]: string}
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driver_log_levels Changes to this property will trigger replacement. Mapping[str, str]
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

WorkflowTemplateJobHiveJob
, WorkflowTemplateJobHiveJobArgs

ContinueOnFailure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
JarFileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains Hive queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobHiveJobQueryList
A list of queries.
ScriptVariables Changes to this property will trigger replacement. Dictionary<string, string>
Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
ContinueOnFailure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
JarFileUris Changes to this property will trigger replacement. []string
HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains Hive queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobHiveJobQueryList
A list of queries.
ScriptVariables Changes to this property will trigger replacement. map[string]string
Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
continueOnFailure Changes to this property will trigger replacement. Boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains Hive queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobHiveJobQueryList
A list of queries.
scriptVariables Changes to this property will trigger replacement. Map<String,String>
Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
continueOnFailure Changes to this property will trigger replacement. boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jarFileUris Changes to this property will trigger replacement. string[]
HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
queryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains Hive queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobHiveJobQueryList
A list of queries.
scriptVariables Changes to this property will trigger replacement. {[key: string]: string}
Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
continue_on_failure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jar_file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
query_file_uri Changes to this property will trigger replacement. str
The HCFS URI of the script that contains Hive queries.
query_list Changes to this property will trigger replacement. WorkflowTemplateJobHiveJobQueryList
A list of queries.
script_variables Changes to this property will trigger replacement. Mapping[str, str]
Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
continueOnFailure Changes to this property will trigger replacement. Boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains Hive queries.
queryList Changes to this property will trigger replacement. Property Map
A list of queries.
scriptVariables Changes to this property will trigger replacement. Map<String>
Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
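For orientation, a Hive step of this shape might be written in TypeScript as below. This is a minimal sketch: the bucket paths, variable name, and Hive property are illustrative placeholders, and the object is intended to go in the template's jobs list.

import * as gcp from "@pulumi/gcp";

// Hypothetical Hive step: run a script from GCS and keep going if one query fails.
const hiveStep: gcp.types.input.dataproc.WorkflowTemplateJob = {
    stepId: "hiveReport",
    hiveJob: {
        queryFileUri: "gs://my-bucket/hive/report.hql", // placeholder URI
        continueOnFailure: true,
        scriptVariables: {
            run_date: "2025-01-01", // made available to the script via Hive variable substitution
        },
        properties: {
            "hive.exec.dynamic.partition.mode": "nonstrict", // example Hive property
        },
    },
};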

WorkflowTemplateJobHiveJobQueryList
, WorkflowTemplateJobHiveJobQueryListArgs

Queries
This property is required.
Changes to this property will trigger replacement.
List<string>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
Queries
This property is required.
Changes to this property will trigger replacement.
[]string
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
string[]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
Sequence[str]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
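When the queries are short enough to inline, queryList can be used in place of queryFileUri. A minimal sketch; the query text is made up for illustration.

import * as gcp from "@pulumi/gcp";

// Hypothetical inline query list; a single entry may itself hold several ;-separated queries.
const reportQueries: gcp.types.input.dataproc.WorkflowTemplateJobHiveJobQueryList = {
    queries: [
        "SHOW DATABASES",
        "CREATE DATABASE IF NOT EXISTS reporting; USE reporting",
    ],
};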

WorkflowTemplateJobPigJob
, WorkflowTemplateJobPigJobArgs

ContinueOnFailure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
JarFileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPigJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains the Pig queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobPigJobQueryList
A list of queries.
ScriptVariables Changes to this property will trigger replacement. Dictionary<string, string>
Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
ContinueOnFailure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
JarFileUris Changes to this property will trigger replacement. []string
HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPigJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains the Pig queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobPigJobQueryList
A list of queries.
ScriptVariables Changes to this property will trigger replacement. map[string]string
Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
continueOnFailure Changes to this property will trigger replacement. Boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPigJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains the Pig queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobPigJobQueryList
A list of queries.
scriptVariables Changes to this property will trigger replacement. Map<String,String>
Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
continueOnFailure Changes to this property will trigger replacement. boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jarFileUris Changes to this property will trigger replacement. string[]
HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPigJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
queryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains the Pig queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobPigJobQueryList
A list of queries.
scriptVariables Changes to this property will trigger replacement. {[key: string]: string}
Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
continue_on_failure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jar_file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
logging_config Changes to this property will trigger replacement. WorkflowTemplateJobPigJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
query_file_uri Changes to this property will trigger replacement. str
The HCFS URI of the script that contains the Pig queries.
query_list Changes to this property will trigger replacement. WorkflowTemplateJobPigJobQueryList
A list of queries.
script_variables Changes to this property will trigger replacement. Mapping[str, str]
Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
continueOnFailure Changes to this property will trigger replacement. Boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
loggingConfig Changes to this property will trigger replacement. Property Map
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains the Pig queries.
queryList Changes to this property will trigger replacement. Property Map
A list of queries.
scriptVariables Changes to this property will trigger replacement. Map<String>
Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
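A Pig step follows the same pattern as the Hive step sketched earlier; in the sketch below the script URI, UDF jar, and script variable are placeholders.

import * as gcp from "@pulumi/gcp";

// Hypothetical Pig step driven by a script in GCS, with one UDF jar added to the classpath.
const pigStep: gcp.types.input.dataproc.WorkflowTemplateJob = {
    stepId: "pigCleanup",
    pigJob: {
        queryFileUri: "gs://my-bucket/pig/cleanup.pig",
        jarFileUris: ["gs://my-bucket/udfs/cleanup-udfs.jar"],
        scriptVariables: {
            input_path: "gs://my-bucket/raw/", // referenced in the script as $input_path
        },
        continueOnFailure: false,
    },
};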

WorkflowTemplateJobPigJobLoggingConfig
, WorkflowTemplateJobPigJobLoggingConfigArgs

DriverLogLevels Changes to this property will trigger replacement. Dictionary<string, string>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
DriverLogLevels Changes to this property will trigger replacement. map[string]string
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String,String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. {[key: string]: string}
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driver_log_levels Changes to this property will trigger replacement. Mapping[str, str]
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
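The logging configuration has the same single field for every job type. Below is a sketch of per-package driver log levels; the package names and levels are examples only.

import * as gcp from "@pulumi/gcp";

// Hypothetical driver log levels: quiet root logger, verbose Pig internals.
const pigLogging: gcp.types.input.dataproc.WorkflowTemplateJobPigJobLoggingConfig = {
    driverLogLevels: {
        "root": "INFO",
        "org.apache.pig": "DEBUG",
    },
};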

WorkflowTemplateJobPigJobQueryList
, WorkflowTemplateJobPigJobQueryListArgs

Queries
This property is required.
Changes to this property will trigger replacement.
List<string>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
Queries
This property is required.
Changes to this property will trigger replacement.
[]string
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
string[]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
Sequence[str]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }

WorkflowTemplateJobPrestoJob
, WorkflowTemplateJobPrestoJobArgs

ClientTags Changes to this property will trigger replacement. List<string>
Presto client tags to attach to this query
ContinueOnFailure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobLoggingConfig
The runtime log config for job execution.
OutputFormat Changes to this property will trigger replacement. string
The format in which query output will be displayed. See the Presto documentation for supported output formats
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names to values, used to set Presto session properties (https://prestodb.io/docs/current/sql/set-session.html). Equivalent to using the --session flag in the Presto CLI.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains SQL queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobQueryList
A list of queries.
ClientTags Changes to this property will trigger replacement. []string
Presto client tags to attach to this query
ContinueOnFailure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobLoggingConfig
The runtime log config for job execution.
OutputFormat Changes to this property will trigger replacement. string
The format in which query output will be displayed. See the Presto documentation for supported output formats
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names to values, used to set Presto session properties (https://prestodb.io/docs/current/sql/set-session.html). Equivalent to using the --session flag in the Presto CLI.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains SQL queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobQueryList
A list of queries.
clientTags Changes to this property will trigger replacement. List<String>
Presto client tags to attach to this query
continueOnFailure Changes to this property will trigger replacement. Boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobLoggingConfig
The runtime log config for job execution.
outputFormat Changes to this property will trigger replacement. String
The format in which query output will be displayed. See the Presto documentation for supported output formats
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names to values, used to set Presto session properties (https://prestodb.io/docs/current/sql/set-session.html). Equivalent to using the --session flag in the Presto CLI.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains SQL queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobQueryList
A list of queries.
clientTags Changes to this property will trigger replacement. string[]
Presto client tags to attach to this query
continueOnFailure Changes to this property will trigger replacement. boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobLoggingConfig
The runtime log config for job execution.
outputFormat Changes to this property will trigger replacement. string
The format in which query output will be displayed. See the Presto documentation for supported output formats
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names to values, used to set Presto session properties (https://prestodb.io/docs/current/sql/set-session.html). Equivalent to using the --session flag in the Presto CLI.
queryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains SQL queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobQueryList
A list of queries.
client_tags Changes to this property will trigger replacement. Sequence[str]
Presto client tags to attach to this query
continue_on_failure Changes to this property will trigger replacement. bool
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
logging_config Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobLoggingConfig
The runtime log config for job execution.
output_format Changes to this property will trigger replacement. str
The format in which query output will be displayed. See the Presto documentation for supported output formats
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names to values, used to set Presto session properties (https://prestodb.io/docs/current/sql/set-session.html). Equivalent to using the --session flag in the Presto CLI.
query_file_uri Changes to this property will trigger replacement. str
The HCFS URI of the script that contains SQL queries.
query_list Changes to this property will trigger replacement. WorkflowTemplateJobPrestoJobQueryList
A list of queries.
clientTags Changes to this property will trigger replacement. List<String>
Presto client tags to attach to this query
continueOnFailure Changes to this property will trigger replacement. Boolean
Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries.
loggingConfig Changes to this property will trigger replacement. Property Map
The runtime log config for job execution.
outputFormat Changes to this property will trigger replacement. String
The format in which query output will be displayed. See the Presto documentation for supported output formats
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names to values, used to set Presto session properties (https://prestodb.io/docs/current/sql/set-session.html). Equivalent to using the --session flag in the Presto CLI.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains SQL queries.
queryList Changes to this property will trigger replacement. Property Map
A list of queries.
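A Presto step might be sketched as follows; the SQL file URI, client tags, and session property are placeholders rather than values taken from this page.

import * as gcp from "@pulumi/gcp";

// Hypothetical Presto step: run SQL from GCS, emit CSV, and tag the query for later filtering.
const prestoStep: gcp.types.input.dataproc.WorkflowTemplateJob = {
    stepId: "prestoDaily",
    prestoJob: {
        queryFileUri: "gs://my-bucket/sql/daily.sql",
        outputFormat: "CSV",
        clientTags: ["nightly", "reporting"],
        properties: {
            query_max_run_time: "30m", // session property, as if set with --session
        },
    },
};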

WorkflowTemplateJobPrestoJobLoggingConfig
, WorkflowTemplateJobPrestoJobLoggingConfigArgs

DriverLogLevels Changes to this property will trigger replacement. Dictionary<string, string>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
DriverLogLevels Changes to this property will trigger replacement. map[string]string
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String,String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. {[key: string]: string}
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driver_log_levels Changes to this property will trigger replacement. Mapping[str, str]
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

WorkflowTemplateJobPrestoJobQueryList
, WorkflowTemplateJobPrestoJobQueryListArgs

Queries
This property is required.
Changes to this property will trigger replacement.
List<string>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
Queries
This property is required.
Changes to this property will trigger replacement.
[]string
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
string[]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
Sequence[str]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }

WorkflowTemplateJobPysparkJob
, WorkflowTemplateJobPysparkJobArgs

MainPythonFileUri
This property is required.
Changes to this property will trigger replacement.
string
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
ArchiveUris Changes to this property will trigger replacement. List<string>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
Args Changes to this property will trigger replacement. List<string>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
JarFileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
PythonFileUris Changes to this property will trigger replacement. List<string>
HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
MainPythonFileUri
This property is required.
Changes to this property will trigger replacement.
string
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
ArchiveUris Changes to this property will trigger replacement. []string
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
Args Changes to this property will trigger replacement. []string
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. []string
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
JarFileUris Changes to this property will trigger replacement. []string
HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
PythonFileUris Changes to this property will trigger replacement. []string
HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
mainPythonFileUri
This property is required.
Changes to this property will trigger replacement.
String
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
pythonFileUris Changes to this property will trigger replacement. List<String>
HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
mainPythonFileUri
This property is required.
Changes to this property will trigger replacement.
string
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
archiveUris Changes to this property will trigger replacement. string[]
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. string[]
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. string[]
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. string[]
HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
pythonFileUris Changes to this property will trigger replacement. string[]
HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
main_python_file_uri
This property is required.
Changes to this property will trigger replacement.
str
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
archive_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. Sequence[str]
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jar_file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
logging_config Changes to this property will trigger replacement. WorkflowTemplateJobPysparkJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
python_file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
mainPythonFileUri
This property is required.
Changes to this property will trigger replacement.
String
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
loggingConfig Changes to this property will trigger replacement. Property Map
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
pythonFileUris Changes to this property will trigger replacement. List<String>
HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
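Only mainPythonFileUri is required for a PySpark step; everything else is optional. A sketch with placeholder URIs and arguments:

import * as gcp from "@pulumi/gcp";

// Hypothetical PySpark step: a driver script plus a helper package shipped alongside it.
const pysparkStep: gcp.types.input.dataproc.WorkflowTemplateJob = {
    stepId: "pysparkEtl",
    pysparkJob: {
        mainPythonFileUri: "gs://my-bucket/jobs/etl.py",
        pythonFileUris: ["gs://my-bucket/jobs/helpers.zip"],
        args: ["--date", "2025-01-01"],
        properties: {
            "spark.executor.memory": "4g", // example Spark property
        },
    },
};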

WorkflowTemplateJobPysparkJobLoggingConfig
, WorkflowTemplateJobPysparkJobLoggingConfigArgs

DriverLogLevels Changes to this property will trigger replacement. Dictionary<string, string>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
DriverLogLevels Changes to this property will trigger replacement. map[string]string
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String,String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. {[key: string]: string}
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driver_log_levels Changes to this property will trigger replacement. Mapping[str, str]
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

WorkflowTemplateJobScheduling
, WorkflowTemplateJobSchedulingArgs

MaxFailuresPerHour Changes to this property will trigger replacement. int
Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. A job may be reported as thrashing if the driver exits with a non-zero code 4 times within a 10-minute window. Maximum value is 10.
MaxFailuresTotal Changes to this property will trigger replacement. int
Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. Maximum value is 240.
MaxFailuresPerHour Changes to this property will trigger replacement. int
Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. A job may be reported as thrashing if the driver exits with a non-zero code 4 times within a 10-minute window. Maximum value is 10.
MaxFailuresTotal Changes to this property will trigger replacement. int
Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. Maximum value is 240.
maxFailuresPerHour Changes to this property will trigger replacement. Integer
Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. A job may be reported as thrashing if the driver exits with a non-zero code 4 times within a 10-minute window. Maximum value is 10.
maxFailuresTotal Changes to this property will trigger replacement. Integer
Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. Maximum value is 240.
maxFailuresPerHour Changes to this property will trigger replacement. number
Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. A job may be reported as thrashing if the driver exits with a non-zero code 4 times within a 10-minute window. Maximum value is 10.
maxFailuresTotal Changes to this property will trigger replacement. number
Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. Maximum value is 240.
max_failures_per_hour Changes to this property will trigger replacement. int
Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. A job may be reported as thrashing if the driver exits with a non-zero code 4 times within a 10-minute window. Maximum value is 10.
max_failures_total Changes to this property will trigger replacement. int
Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. Maximum value is 240.
maxFailuresPerHour Changes to this property will trigger replacement. Number
Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. A job may be reported as thrashing if the driver exits with a non-zero code 4 times within a 10-minute window. Maximum value is 10.
maxFailuresTotal Changes to this property will trigger replacement. Number
Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported as failed. Maximum value is 240.
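Scheduling is set on the job step itself, alongside whichever job type the step runs. The sketch below uses arbitrarily chosen limits within the documented maximums; the class name and jar path are placeholders.

import * as gcp from "@pulumi/gcp";

// Hypothetical step that tolerates a few driver restarts before the job is considered failed.
const retriedStep: gcp.types.input.dataproc.WorkflowTemplateJob = {
    stepId: "flakySpark",
    sparkJob: {
        mainClass: "com.example.Flaky",
        jarFileUris: ["gs://my-bucket/jars/flaky.jar"],
    },
    scheduling: {
        maxFailuresPerHour: 3,
        maxFailuresTotal: 10,
    },
};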

WorkflowTemplateJobSparkJob
, WorkflowTemplateJobSparkJobArgs

ArchiveUris Changes to this property will trigger replacement. List<string>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
Args Changes to this property will trigger replacement. List<string>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
JarFileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkJobLoggingConfig
The runtime log config for job execution.
MainClass Changes to this property will trigger replacement. string
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
MainJarFileUri Changes to this property will trigger replacement. string
The HCFS URI of the jar file that contains the main class.
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
ArchiveUris Changes to this property will trigger replacement. []string
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
Args Changes to this property will trigger replacement. []string
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. []string
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
JarFileUris Changes to this property will trigger replacement. []string
HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkJobLoggingConfig
The runtime log config for job execution.
MainClass Changes to this property will trigger replacement. string
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
MainJarFileUri Changes to this property will trigger replacement. string
The HCFS URI of the jar file that contains the main class.
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkJobLoggingConfig
The runtime log config for job execution.
mainClass Changes to this property will trigger replacement. String
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
mainJarFileUri Changes to this property will trigger replacement. String
The HCFS URI of the jar file that contains the main class.
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
archiveUris Changes to this property will trigger replacement. string[]
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. string[]
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. string[]
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. string[]
HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkJobLoggingConfig
The runtime log config for job execution.
mainClass Changes to this property will trigger replacement. string
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
mainJarFileUri Changes to this property will trigger replacement. string
The HCFS URI of the jar file that contains the main class.
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
archive_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. Sequence[str]
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jar_file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
logging_config Changes to this property will trigger replacement. WorkflowTemplateJobSparkJobLoggingConfig
The runtime log config for job execution.
main_class Changes to this property will trigger replacement. str
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
main_jar_file_uri Changes to this property will trigger replacement. str
The HCFS URI of the jar file that contains the main class.
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
loggingConfig Changes to this property will trigger replacement. Property Map
The runtime log config for job execution.
mainClass Changes to this property will trigger replacement. String
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
mainJarFileUri Changes to this property will trigger replacement. String
The HCFS URI of the jar file that contains the main class.
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
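mainClass and mainJarFileUri are alternative ways of identifying the driver entry point; the sketch below uses mainJarFileUri so no extra classpath setup is needed. Paths, arguments, and the Spark property are placeholders.

import * as gcp from "@pulumi/gcp";

// Hypothetical Spark step driven directly by an assembly jar.
const sparkStep: gcp.types.input.dataproc.WorkflowTemplateJob = {
    stepId: "sparkBatch",
    sparkJob: {
        mainJarFileUri: "gs://my-bucket/jars/job-assembly.jar",
        args: ["--mode", "full"],
        properties: {
            "spark.sql.shuffle.partitions": "200", // example Spark property
        },
    },
};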

WorkflowTemplateJobSparkJobLoggingConfig
, WorkflowTemplateJobSparkJobLoggingConfigArgs

DriverLogLevels Changes to this property will trigger replacement. Dictionary<string, string>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
DriverLogLevels Changes to this property will trigger replacement. map[string]string
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String,String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. {[key: string]: string}
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driver_log_levels Changes to this property will trigger replacement. Mapping[str, str]
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

WorkflowTemplateJobSparkRJob
, WorkflowTemplateJobSparkRJobArgs

MainRFileUri
This property is required.
Changes to this property will trigger replacement.
string
Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
ArchiveUris Changes to this property will trigger replacement. List<string>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
Args Changes to this property will trigger replacement. List<string>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
MainRFileUri
This property is required.
Changes to this property will trigger replacement.
string
Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
ArchiveUris Changes to this property will trigger replacement. []string
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
Args Changes to this property will trigger replacement. []string
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
FileUris Changes to this property will trigger replacement. []string
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
mainRFileUri
This property is required.
Changes to this property will trigger replacement.
String
Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
mainRFileUri
This property is required.
Changes to this property will trigger replacement.
string
Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
archiveUris Changes to this property will trigger replacement. string[]
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. string[]
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. string[]
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
main_r_file_uri
This property is required.
Changes to this property will trigger replacement.
str
Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
archive_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. Sequence[str]
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
logging_config Changes to this property will trigger replacement. WorkflowTemplateJobSparkRJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
mainRFileUri
This property is required.
Changes to this property will trigger replacement.
String
Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
archiveUris Changes to this property will trigger replacement. List<String>
HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
args Changes to this property will trigger replacement. List<String>
The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
fileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
loggingConfig Changes to this property will trigger replacement. Property Map
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
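The SparkR fields above map onto the resource as shown in this minimal TypeScript sketch. The R file URI, argument, and cluster label are hypothetical; Spark configuration goes into properties rather than being passed as --conf arguments, per the note above.

import * as gcp from "@pulumi/gcp";

const sparkRTemplate = new gcp.dataproc.WorkflowTemplate("sparkr-template", {
    name: "sparkr-template",
    location: "us-central1",
    placement: {
        clusterSelector: {
            clusterLabels: { env: "staging" }, // assumes an existing labeled cluster
        },
    },
    jobs: [{
        stepId: "sparkr-step",
        sparkRJob: {
            mainRFileUri: "gs://my-bucket/analysis.R",  // hypothetical .R driver file
            args: ["gs://my-bucket/input.csv"],         // application arguments only
            properties: {
                // Spark config belongs here, not in args.
                "spark.r.backendConnectionTimeout": "600",
            },
        },
    }],
});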

WorkflowTemplateJobSparkRJobLoggingConfig
, WorkflowTemplateJobSparkRJobLoggingConfigArgs

DriverLogLevels Changes to this property will trigger replacement. Dictionary<string, string>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
DriverLogLevels Changes to this property will trigger replacement. map[string]string
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String,String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. {[key: string]: string}
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driver_log_levels Changes to this property will trigger replacement. Mapping[str, str]
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

WorkflowTemplateJobSparkSqlJob
, WorkflowTemplateJobSparkSqlJobArgs

JarFileUris Changes to this property will trigger replacement. List<string>
HCFS URIs of jar files to be added to the Spark CLASSPATH.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. Dictionary<string, string>
A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains SQL queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobQueryList
A list of queries.
ScriptVariables Changes to this property will trigger replacement. Dictionary<string, string>
Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
JarFileUris Changes to this property will trigger replacement. []string
HCFS URIs of jar files to be added to the Spark CLASSPATH.
LoggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobLoggingConfig
The runtime log config for job execution.
Properties Changes to this property will trigger replacement. map[string]string
A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
QueryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains SQL queries.
QueryList Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobQueryList
A list of queries.
ScriptVariables Changes to this property will trigger replacement. map[string]string
Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to be added to the Spark CLASSPATH.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String,String>
A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains SQL queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobQueryList
A list of queries.
scriptVariables Changes to this property will trigger replacement. Map<String,String>
Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
jarFileUris Changes to this property will trigger replacement. string[]
HCFS URIs of jar files to be added to the Spark CLASSPATH.
loggingConfig Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. {[key: string]: string}
A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
queryFileUri Changes to this property will trigger replacement. string
The HCFS URI of the script that contains SQL queries.
queryList Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobQueryList
A list of queries.
scriptVariables Changes to this property will trigger replacement. {[key: string]: string}
Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
jar_file_uris Changes to this property will trigger replacement. Sequence[str]
HCFS URIs of jar files to be added to the Spark CLASSPATH.
logging_config Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobLoggingConfig
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Mapping[str, str]
A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
query_file_uri Changes to this property will trigger replacement. str
The HCFS URI of the script that contains SQL queries.
query_list Changes to this property will trigger replacement. WorkflowTemplateJobSparkSqlJobQueryList
A list of queries.
script_variables Changes to this property will trigger replacement. Mapping[str, str]
Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
jarFileUris Changes to this property will trigger replacement. List<String>
HCFS URIs of jar files to be added to the Spark CLASSPATH.
loggingConfig Changes to this property will trigger replacement. Property Map
The runtime log config for job execution.
properties Changes to this property will trigger replacement. Map<String>
A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
queryFileUri Changes to this property will trigger replacement. String
The HCFS URI of the script that contains SQL queries.
queryList Changes to this property will trigger replacement. Property Map
A list of queries.
scriptVariables Changes to this property will trigger replacement. Map<String>
Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
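To show how queryList and scriptVariables interact, here is a hedged TypeScript sketch of a sparkSqlJob. The queries, the suffix variable, and the cluster label are made up; the variable is referenced inside the queries with Spark SQL's ${name} substitution, which is what scriptVariables (SET name="value";) feeds.

import * as gcp from "@pulumi/gcp";

const sparkSqlTemplate = new gcp.dataproc.WorkflowTemplate("spark-sql-template", {
    name: "spark-sql-template",
    location: "us-central1",
    placement: {
        clusterSelector: {
            clusterLabels: { env: "staging" }, // assumes an existing labeled cluster
        },
    },
    jobs: [{
        stepId: "sql-step",
        sparkSqlJob: {
            queryList: {
                queries: [
                    "CREATE TABLE IF NOT EXISTS logs_${suffix} (msg STRING)",
                    "SELECT COUNT(*) FROM logs_${suffix}",
                ],
            },
            // Equivalent to running: SET suffix=dev;
            scriptVariables: { suffix: "dev" },
        },
    }],
});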

WorkflowTemplateJobSparkSqlJobLoggingConfig
, WorkflowTemplateJobSparkSqlJobLoggingConfigArgs

DriverLogLevels Changes to this property will trigger replacement. Dictionary<string, string>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
DriverLogLevels Changes to this property will trigger replacement. map[string]string
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String,String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. {[key: string]: string}
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driver_log_levels Changes to this property will trigger replacement. Mapping[str, str]
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
driverLogLevels Changes to this property will trigger replacement. Map<String>
The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

WorkflowTemplateJobSparkSqlJobQueryList
, WorkflowTemplateJobSparkSqlJobQueryListArgs

Queries
This property is required.
Changes to this property will trigger replacement.
List<string>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
Queries
This property is required.
Changes to this property will trigger replacement.
[]string
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
string[]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
Sequence[str]
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
queries
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }

WorkflowTemplateParameter
, WorkflowTemplateParameterArgs

Fields
This property is required.
Changes to this property will trigger replacement.
List<string>
Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask; for example, placement.clusterSelector.zone or jobs['step-id'].sparkJob.args[0].
Name
This property is required.
Changes to this property will trigger replacement.
string
Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters.
Description Changes to this property will trigger replacement. string
Brief description of the parameter. Must not exceed 1024 characters.
Validation Changes to this property will trigger replacement. WorkflowTemplateParameterValidation
Validation rules to be applied to this parameter's value.
Fields
This property is required.
Changes to this property will trigger replacement.
[]string
Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask; for example, placement.clusterSelector.zone or jobs['step-id'].sparkJob.args[0].
Name
This property is required.
Changes to this property will trigger replacement.
string
Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters.
Description Changes to this property will trigger replacement. string
Brief description of the parameter. Must not exceed 1024 characters.
Validation Changes to this property will trigger replacement. WorkflowTemplateParameterValidation
Validation rules to be applied to this parameter's value.
fields
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask; for example, placement.clusterSelector.zone or jobs['step-id'].sparkJob.args[0].
name
This property is required.
Changes to this property will trigger replacement.
String
Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters.
description Changes to this property will trigger replacement. String
Brief description of the parameter. Must not exceed 1024 characters.
validation Changes to this property will trigger replacement. WorkflowTemplateParameterValidation
Validation rules to be applied to this parameter's value.
fields
This property is required.
Changes to this property will trigger replacement.
string[]
Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask; for example, placement.clusterSelector.zone or jobs['step-id'].sparkJob.args[0].
name
This property is required.
Changes to this property will trigger replacement.
string
Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters.
description Changes to this property will trigger replacement. string
Brief description of the parameter. Must not exceed 1024 characters.
validation Changes to this property will trigger replacement. WorkflowTemplateParameterValidation
Validation rules to be applied to this parameter's value.
fields
This property is required.
Changes to this property will trigger replacement.
Sequence[str]
Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask; for example, placement.clusterSelector.zone or jobs['step-id'].sparkJob.args[0].
name
This property is required.
Changes to this property will trigger replacement.
str
Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters.
description Changes to this property will trigger replacement. str
Brief description of the parameter. Must not exceed 1024 characters.
validation Changes to this property will trigger replacement. WorkflowTemplateParameterValidation
Validation rules to be applied to this parameter's value.
fields
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask; for example, placement.clusterSelector.zone or jobs['step-id'].sparkJob.args[0].
name
This property is required.
Changes to this property will trigger replacement.
String
Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters.
description Changes to this property will trigger replacement. String
Brief description of the parameter. Must not exceed 1024 characters.
validation Changes to this property will trigger replacement. Property Map
Validation rules to be applied to this parameter's value.
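Parameters become concrete once a field path is seen in use. The sketch below parameterizes the first Spark argument of a job using the jobs['step-id'].sparkJob.args[0] path syntax and restricts it to an allowed-values list; every name here (template, class, bucket, cluster label) is a hypothetical placeholder.

import * as gcp from "@pulumi/gcp";

const parameterizedTemplate = new gcp.dataproc.WorkflowTemplate("parameterized-template", {
    name: "parameterized-template",
    location: "us-central1",
    placement: {
        clusterSelector: {
            clusterLabels: { env: "staging" },
        },
    },
    jobs: [{
        stepId: "spark-step",
        sparkJob: {
            mainClass: "com.example.SparkApp",
            jarFileUris: ["gs://my-bucket/spark-app.jar"],
            args: ["dev"], // default value, replaced at instantiation time
        },
    }],
    parameters: [{
        name: "ENVIRONMENT",
        description: "Deployment environment passed as the first Spark argument.",
        // Field path targeting the first argument of the job with step id "spark-step".
        fields: ["jobs['spark-step'].sparkJob.args[0]"],
        validation: {
            values: { values: ["dev", "staging", "prod"] },
        },
    }],
});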

WorkflowTemplateParameterValidation
, WorkflowTemplateParameterValidationArgs

Regex Changes to this property will trigger replacement. WorkflowTemplateParameterValidationRegex
Validation based on regular expressions.
Values Changes to this property will trigger replacement. WorkflowTemplateParameterValidationValues
Validation based on a list of allowed values.
Regex Changes to this property will trigger replacement. WorkflowTemplateParameterValidationRegex
Validation based on regular expressions.
Values Changes to this property will trigger replacement. WorkflowTemplateParameterValidationValues
Validation based on a list of allowed values.
regex Changes to this property will trigger replacement. WorkflowTemplateParameterValidationRegex
Validation based on regular expressions.
values Changes to this property will trigger replacement. WorkflowTemplateParameterValidationValues
Validation based on a list of allowed values.
regex Changes to this property will trigger replacement. WorkflowTemplateParameterValidationRegex
Validation based on regular expressions.
values Changes to this property will trigger replacement. WorkflowTemplateParameterValidationValues
Validation based on a list of allowed values.
regex Changes to this property will trigger replacement. WorkflowTemplateParameterValidationRegex
Validation based on regular expressions.
values Changes to this property will trigger replacement. WorkflowTemplateParameterValidationValues
Validation based on a list of allowed values.
regex Changes to this property will trigger replacement. Property Map
Validation based on regular expressions.
values Changes to this property will trigger replacement. Property Map
Validation based on a list of allowed values.

WorkflowTemplateParameterValidationRegex
, WorkflowTemplateParameterValidationRegexArgs

Regexes
This property is required.
Changes to this property will trigger replacement.
List<string>
Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
Regexes
This property is required.
Changes to this property will trigger replacement.
[]string
Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
regexes
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
regexes
This property is required.
Changes to this property will trigger replacement.
string[]
Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
regexes
This property is required.
Changes to this property will trigger replacement.
Sequence[str]
Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
regexes
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
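For regex-based validation, the sketch below constrains a parameterized zone on a cluster selector; because RE2 patterns must match the whole value, us-central1-[abcf] accepts us-central1-a but rejects a bare us-central1. The cluster label, bucket, and zone values are assumptions for illustration.

import * as gcp from "@pulumi/gcp";

const zoneParamTemplate = new gcp.dataproc.WorkflowTemplate("zone-param-template", {
    name: "zone-param-template",
    location: "us-central1",
    placement: {
        clusterSelector: {
            clusterLabels: { env: "staging" },
            zone: "us-central1-a", // default; replaced when the template is instantiated
        },
    },
    jobs: [{
        stepId: "spark-step",
        sparkJob: { mainJarFileUri: "gs://my-bucket/spark-app.jar" },
    }],
    parameters: [{
        name: "ZONE",
        fields: ["placement.clusterSelector.zone"],
        validation: {
            // Each pattern is RE2 and must match the value in its entirety.
            regex: { regexes: ["us-central1-[abcf]"] },
        },
    }],
});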

WorkflowTemplateParameterValidationValues
, WorkflowTemplateParameterValidationValuesArgs

Values
This property is required.
Changes to this property will trigger replacement.
List<string>
Required. List of allowed values for the parameter.
Values
This property is required.
Changes to this property will trigger replacement.
[]string
Required. List of allowed values for the parameter.
values
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. List of allowed values for the parameter.
values
This property is required.
Changes to this property will trigger replacement.
string[]
Required. List of allowed values for the parameter.
values
This property is required.
Changes to this property will trigger replacement.
Sequence[str]
Required. List of allowed values for the parameter.
values
This property is required.
Changes to this property will trigger replacement.
List<String>
Required. List of allowed values for the parameter.

WorkflowTemplatePlacement
, WorkflowTemplatePlacementArgs

ClusterSelector Changes to this property will trigger replacement. WorkflowTemplatePlacementClusterSelector
A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
ManagedCluster Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedCluster
A cluster that is managed by the workflow.
ClusterSelector Changes to this property will trigger replacement. WorkflowTemplatePlacementClusterSelector
A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
ManagedCluster Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedCluster
A cluster that is managed by the workflow.
clusterSelector Changes to this property will trigger replacement. WorkflowTemplatePlacementClusterSelector
A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
managedCluster Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedCluster
A cluster that is managed by the workflow.
clusterSelector Changes to this property will trigger replacement. WorkflowTemplatePlacementClusterSelector
A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
managedCluster Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedCluster
A cluster that is managed by the workflow.
cluster_selector Changes to this property will trigger replacement. WorkflowTemplatePlacementClusterSelector
A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
managed_cluster Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedCluster
A cluster that is managed by the workflow.
clusterSelector Changes to this property will trigger replacement. Property Map
A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
managedCluster Changes to this property will trigger replacement. Property Map
A cluster that is managed by the workflow.

WorkflowTemplatePlacementClusterSelector
, WorkflowTemplatePlacementClusterSelectorArgs

ClusterLabels
This property is required.
Changes to this property will trigger replacement.
Dictionary<string, string>
Required. The cluster labels. Cluster must have all labels to match.
Zone Changes to this property will trigger replacement. string
The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.
ClusterLabels
This property is required.
Changes to this property will trigger replacement.
map[string]string
Required. The cluster labels. Cluster must have all labels to match.
Zone Changes to this property will trigger replacement. string
The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.
clusterLabels
This property is required.
Changes to this property will trigger replacement.
Map<String,String>
Required. The cluster labels. Cluster must have all labels to match.
zone Changes to this property will trigger replacement. String
The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.
clusterLabels
This property is required.
Changes to this property will trigger replacement.
{[key: string]: string}
Required. The cluster labels. Cluster must have all labels to match.
zone Changes to this property will trigger replacement. string
The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.
cluster_labels
This property is required.
Changes to this property will trigger replacement.
Mapping[str, str]
Required. The cluster labels. Cluster must have all labels to match.
zone Changes to this property will trigger replacement. str
The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.
clusterLabels
This property is required.
Changes to this property will trigger replacement.
Map<String>
Required. The cluster labels. Cluster must have all labels to match.
zone Changes to this property will trigger replacement. String
The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.

WorkflowTemplatePlacementManagedCluster
, WorkflowTemplatePlacementManagedClusterArgs

ClusterName
This property is required.
Changes to this property will trigger replacement.
string
Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
Config
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacementManagedClusterConfig
Required. The cluster configuration.
Labels Changes to this property will trigger replacement. Dictionary<string, string>
The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
ClusterName
This property is required.
Changes to this property will trigger replacement.
string
Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
Config
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacementManagedClusterConfig
Required. The cluster configuration.
Labels Changes to this property will trigger replacement. map[string]string
The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
clusterName
This property is required.
Changes to this property will trigger replacement.
String
Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
config
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacementManagedClusterConfig
Required. The cluster configuration.
labels Changes to this property will trigger replacement. Map<String,String>
The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
clusterName
This property is required.
Changes to this property will trigger replacement.
string
Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
config
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacementManagedClusterConfig
Required. The cluster configuration.
labels Changes to this property will trigger replacement. {[key: string]: string}
The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
cluster_name
This property is required.
Changes to this property will trigger replacement.
str
Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
config
This property is required.
Changes to this property will trigger replacement.
WorkflowTemplatePlacementManagedClusterConfig
Required. The cluster configuration.
labels Changes to this property will trigger replacement. Mapping[str, str]
The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
clusterName
This property is required.
Changes to this property will trigger replacement.
String
Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
config
This property is required.
Changes to this property will trigger replacement.
Property Map
Required. The cluster configuration.
labels Changes to this property will trigger replacement. Map<String>
The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
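Since the cluster name is only a prefix and labels are capped at 32 per cluster, a short sketch helps; the managed-cluster config below is deliberately abbreviated to highlight clusterName and labels, and a production template would typically spell out master and worker configuration as well. All names and values are hypothetical.

import * as gcp from "@pulumi/gcp";

const labeledClusterTemplate = new gcp.dataproc.WorkflowTemplate("labeled-cluster-template", {
    name: "labeled-cluster-template",
    location: "us-central1",
    placement: {
        managedCluster: {
            clusterName: "labeled-cluster", // name prefix; a random suffix is appended
            labels: {
                team: "data",
                env: "staging",
            },
            config: {
                gceClusterConfig: { zone: "us-central1-a" },
            },
        },
    },
    jobs: [{
        stepId: "spark-step",
        sparkJob: { mainJarFileUri: "gs://my-bucket/spark-app.jar" },
    }],
});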

WorkflowTemplatePlacementManagedClusterConfig
, WorkflowTemplatePlacementManagedClusterConfigArgs

AutoscalingConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
EncryptionConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
Encryption settings for the cluster.
EndpointConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEndpointConfig
Port/endpoint configuration for this cluster
GceClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
The shared Compute Engine config settings for all instances in a cluster.
GkeClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfig
The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. Setting this is considered mutually exclusive with Compute Engine-based options such as gce_cluster_config, master_config, worker_config, secondary_worker_config, and autoscaling_config.
InitializationActions Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigInitializationAction>
Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi
LifecycleConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
Lifecycle setting for the cluster.
MasterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfig
The Compute Engine config settings for the cluster's master instance.
MetastoreConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMetastoreConfig
Metastore configuration.
SecondaryWorkerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
The Compute Engine config settings for additional worker instances in a cluster.
SecurityConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfig
Security settings for the cluster.
SoftwareConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
The config settings for software inside the cluster.
StagingBucket Changes to this property will trigger replacement. string
A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see Dataproc staging and temp buckets).
TempBucket Changes to this property will trigger replacement. string
A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
WorkerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfig
The Compute Engine config settings for the cluster's worker instances.


AutoscalingConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
EncryptionConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
Encryption settings for the cluster.
EndpointConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEndpointConfig
Port/endpoint configuration for this cluster
GceClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
The shared Compute Engine config settings for all instances in a cluster.
GkeClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfig
The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. Setting this is considered mutually exclusive with Compute Engine-based options such as gce_cluster_config, master_config, worker_config, secondary_worker_config, and autoscaling_config.
InitializationActions Changes to this property will trigger replacement. []WorkflowTemplatePlacementManagedClusterConfigInitializationAction
Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi
LifecycleConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
Lifecycle setting for the cluster.
MasterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfig
The Compute Engine config settings for the cluster's master instance.
MetastoreConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMetastoreConfig
Metastore configuration.
SecondaryWorkerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
The Compute Engine config settings for additional worker instances in a cluster.
SecurityConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfig
Security settings for the cluster.
SoftwareConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
The config settings for software inside the cluster.
StagingBucket Changes to this property will trigger replacement. string
A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see Dataproc staging and temp buckets).
TempBucket Changes to this property will trigger replacement. string
A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
WorkerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfig
The Compute Engine config settings for the cluster's worker instances.


autoscalingConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
encryptionConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
Encryption settings for the cluster.
endpointConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEndpointConfig
Port/endpoint configuration for this cluster
gceClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
The shared Compute Engine config settings for all instances in a cluster.
gkeClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfig
The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. Setting this is considered mutually exclusive with Compute Engine-based options such as gce_cluster_config, master_config, worker_config, secondary_worker_config, and autoscaling_config.
initializationActions Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigInitializationAction>
Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi
lifecycleConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
Lifecycle setting for the cluster.
masterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfig
The Compute Engine config settings for the cluster's master instance.
metastoreConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMetastoreConfig
Metastore configuration.
secondaryWorkerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
The Compute Engine config settings for additional worker instances in a cluster.
securityConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfig
Security settings for the cluster.
softwareConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
The config settings for software inside the cluster.
stagingBucket Changes to this property will trigger replacement. String
A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see Dataproc staging and temp buckets).
tempBucket Changes to this property will trigger replacement. String
A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
workerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfig
The Compute Engine config settings for the cluster's worker instances.


autoscalingConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
encryptionConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
Encryption settings for the cluster.
endpointConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEndpointConfig
Port/endpoint configuration for this cluster
gceClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
The shared Compute Engine config settings for all instances in a cluster.
gkeClusterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfig
The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. Setting this is considered mutually exclusive with Compute Engine-based options such as gce_cluster_config, master_config, worker_config, secondary_worker_config, and autoscaling_config.
initializationActions Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigInitializationAction[]
Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi
lifecycleConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
Lifecycle setting for the cluster.
masterConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfig
The Compute Engine config settings for the cluster's master instance.
metastoreConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMetastoreConfig
Metastore configuration.
secondaryWorkerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
The Compute Engine config settings for additional worker instances in a cluster.
securityConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfig
Security settings for the cluster.
softwareConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
The config settings for software inside the cluster.
stagingBucket Changes to this property will trigger replacement. string
A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see Dataproc staging and temp buckets).
tempBucket Changes to this property will trigger replacement. string
A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
workerConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfig
The Compute Engine config settings for the cluster's worker instances.


autoscaling_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
encryption_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
Encryption settings for the cluster.
endpoint_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigEndpointConfig
Port/endpoint configuration for this cluster
gce_cluster_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
The shared Compute Engine config settings for all instances in a cluster.
gke_cluster_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfig
The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. Setting this is considered mutually exclusive with Compute Engine-based options such as gce_cluster_config, master_config, worker_config, secondary_worker_config, and autoscaling_config.
initialization_actions Changes to this property will trigger replacement. Sequence[WorkflowTemplatePlacementManagedClusterConfigInitializationAction]
Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi
lifecycle_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
Lifecycle setting for the cluster.
master_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfig
The Compute Engine config settings for the master instance in a cluster.
metastore_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMetastoreConfig
Metastore configuration.
secondary_worker_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
The Compute Engine config settings for additional worker instances in a cluster.
security_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfig
Security settings for the cluster.
software_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
The config settings for software inside the cluster.
staging_bucket Changes to this property will trigger replacement. str
A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see Dataproc staging and temp buckets).
temp_bucket Changes to this property will trigger replacement. str
A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
worker_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfig
The Compute Engine config settings for worker instances in a cluster.


autoscalingConfig Changes to this property will trigger replacement. Property Map
Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
encryptionConfig Changes to this property will trigger replacement. Property Map
Encryption settings for the cluster.
endpointConfig Changes to this property will trigger replacement. Property Map
Port/endpoint configuration for this cluster
gceClusterConfig Changes to this property will trigger replacement. Property Map
The shared Compute Engine config settings for all instances in a cluster.
gkeClusterConfig Changes to this property will trigger replacement. Property Map
The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. Setting this is considered mutually exclusive with Compute Engine-based options such as gce_cluster_config, master_config, worker_config, secondary_worker_config, and autoscaling_config.
initializationActions Changes to this property will trigger replacement. List<Property Map>
Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role); if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi
lifecycleConfig Changes to this property will trigger replacement. Property Map
Lifecycle setting for the cluster.
masterConfig Changes to this property will trigger replacement. Property Map
The Compute Engine config settings for the master instance in a cluster.
metastoreConfig Changes to this property will trigger replacement. Property Map
Metastore configuration.
secondaryWorkerConfig Changes to this property will trigger replacement. Property Map
The Compute Engine config settings for additional worker instances in a cluster.
securityConfig Changes to this property will trigger replacement. Property Map
Security settings for the cluster.
softwareConfig Changes to this property will trigger replacement. Property Map
The config settings for software inside the cluster.
stagingBucket Changes to this property will trigger replacement. String
A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see Dataproc staging and temp buckets).
tempBucket Changes to this property will trigger replacement. String
A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
workerConfig Changes to this property will trigger replacement. Property Map
The Compute Engine config settings for worker instances in a cluster.
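For orientation, the bucket fields above sit directly on the managed cluster's config block. A minimal TypeScript sketch, assuming the usual @pulumi/gcp input-type layout (gcp.types.input.dataproc.*) and placeholder bucket names:

import * as gcp from "@pulumi/gcp";

// Slots into placement.managedCluster.config of a WorkflowTemplate.
// If these fields are omitted, Dataproc creates and manages per-location
// buckets for you (the default temp bucket carries a 90-day TTL).
const bucketConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfig = {
    stagingBucket: "my-dataproc-staging-bucket",
    tempBucket: "my-dataproc-temp-bucket",
};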


WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
, WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigArgs

Policy Changes to this property will trigger replacement. string
The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
Policy Changes to this property will trigger replacement. string
The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
policy Changes to this property will trigger replacement. String
The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
policy Changes to this property will trigger replacement. string
The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
policy Changes to this property will trigger replacement. str
The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
policy Changes to this property will trigger replacement. String
The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
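As a rough illustration of the policy reference format, here is a TypeScript sketch assuming the usual @pulumi/gcp input-type layout (gcp.types.input.dataproc.*); the project, region, and policy name are placeholders:

import * as gcp from "@pulumi/gcp";

// The referenced autoscaling policy must live in the same project and
// Dataproc region as the workflow template.
const autoscalingConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig = {
    policy: "projects/my-project/locations/us-central1/autoscalingPolicies/my-policy",
};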

WorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
, WorkflowTemplatePlacementManagedClusterConfigEncryptionConfigArgs

GcePdKmsKeyName Changes to this property will trigger replacement. string
The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
GcePdKmsKeyName Changes to this property will trigger replacement. string
The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
gcePdKmsKeyName Changes to this property will trigger replacement. String
The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
gcePdKmsKeyName Changes to this property will trigger replacement. string
The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
gce_pd_kms_key_name Changes to this property will trigger replacement. str
The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
gcePdKmsKeyName Changes to this property will trigger replacement. String
The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
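A minimal sketch of customer-managed disk encryption for the managed cluster, assuming the usual @pulumi/gcp input-type layout; the KMS key name is a placeholder:

import * as gcp from "@pulumi/gcp";

// All persistent disks in the cluster are encrypted with this key.
const encryptionConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigEncryptionConfig = {
    gcePdKmsKeyName: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
};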

WorkflowTemplatePlacementManagedClusterConfigEndpointConfig
, WorkflowTemplatePlacementManagedClusterConfigEndpointConfigArgs

EnableHttpPortAccess Changes to this property will trigger replacement. bool
If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
HttpPorts Dictionary<string, string>
Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
EnableHttpPortAccess Changes to this property will trigger replacement. bool
If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
HttpPorts map[string]string
Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
enableHttpPortAccess Changes to this property will trigger replacement. Boolean
If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
httpPorts Map<String,String>
Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
enableHttpPortAccess Changes to this property will trigger replacement. boolean
If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
httpPorts {[key: string]: string}
Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
enable_http_port_access Changes to this property will trigger replacement. bool
If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
http_ports Mapping[str, str]
Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
enableHttpPortAccess Changes to this property will trigger replacement. Boolean
If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
httpPorts Map<String>
Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
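Since httpPorts is output only, the only input here is the enable flag. A minimal sketch, assuming the usual @pulumi/gcp input-type layout:

import * as gcp from "@pulumi/gcp";

// Exposes cluster web UIs over HTTP; the resulting URLs are reported back
// in the output-only httpPorts map.
const endpointConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigEndpointConfig = {
    enableHttpPortAccess: true,
};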

WorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
, WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs

InternalIpOnly Changes to this property will trigger replacement. bool
If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
Metadata Changes to this property will trigger replacement. Dictionary<string, string>
The Compute Engine metadata entries to add to all instances (see About VM metadata).
Network Changes to this property will trigger replacement. string
The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see Using Subnetworks for more information). A full URL, partial URI, or short name are valid. Examples: * projects/[project_id]/regions/global/default * default
NodeGroupAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
Node Group Affinity for sole-tenant clusters.
PrivateIpv6GoogleAccess Changes to this property will trigger replacement. string
The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
ReservationAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
Reservation Affinity for consuming Zonal reservation.
ServiceAccount Changes to this property will trigger replacement. string
The service account used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
ServiceAccountScopes Changes to this property will trigger replacement. List<string>
The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
ShieldedInstanceConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
Subnetwork Changes to this property will trigger replacement. string
The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
Tags Changes to this property will trigger replacement. List<string>
The Compute Engine tags to add to all instances (see Manage tags for resources).
Zone Changes to this property will trigger replacement. string
The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
InternalIpOnly Changes to this property will trigger replacement. bool
If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
Metadata Changes to this property will trigger replacement. map[string]string
The Compute Engine metadata entries to add to all instances (see About VM metadata).
Network Changes to this property will trigger replacement. string
The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see Using Subnetworks for more information). A full URL, partial URI, or short name are valid. Examples: * projects/[project_id]/regions/global/default * default
NodeGroupAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
Node Group Affinity for sole-tenant clusters.
PrivateIpv6GoogleAccess Changes to this property will trigger replacement. string
The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
ReservationAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
Reservation Affinity for consuming Zonal reservation.
ServiceAccount Changes to this property will trigger replacement. string
The service account used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
ServiceAccountScopes Changes to this property will trigger replacement. []string
The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
ShieldedInstanceConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
Subnetwork Changes to this property will trigger replacement. string
The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
Tags Changes to this property will trigger replacement. []string
The Compute Engine tags to add to all instances (see Manage tags for resources).
Zone Changes to this property will trigger replacement. string
The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
internalIpOnly Changes to this property will trigger replacement. Boolean
If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
metadata Changes to this property will trigger replacement. Map<String,String>
The Compute Engine metadata entries to add to all instances (see About VM metadata).
network Changes to this property will trigger replacement. String
The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see Using Subnetworks for more information). A full URL, partial URI, or short name are valid. Examples: * projects/[project_id]/regions/global/default * default
nodeGroupAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
Node Group Affinity for sole-tenant clusters.
privateIpv6GoogleAccess Changes to this property will trigger replacement. String
The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
reservationAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
Reservation Affinity for consuming Zonal reservation.
serviceAccount Changes to this property will trigger replacement. String
The service account used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
serviceAccountScopes Changes to this property will trigger replacement. List<String>
The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
shieldedInstanceConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
subnetwork Changes to this property will trigger replacement. String
The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
tags Changes to this property will trigger replacement. List<String>
The Compute Engine tags to add to all instances (see Manage tags for resources).
zone Changes to this property will trigger replacement. String
The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
internalIpOnly Changes to this property will trigger replacement. boolean
If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
metadata Changes to this property will trigger replacement. {[key: string]: string}
The Compute Engine metadata entries to add to all instances (see About VM metadata).
network Changes to this property will trigger replacement. string
The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see Using Subnetworks for more information). A full URL, partial URI, or short name are valid. Examples: * projects/[project_id]/regions/global/default * default
nodeGroupAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
Node Group Affinity for sole-tenant clusters.
privateIpv6GoogleAccess Changes to this property will trigger replacement. string
The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
reservationAffinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
Reservation Affinity for consuming Zonal reservation.
serviceAccount Changes to this property will trigger replacement. string
The service account used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
serviceAccountScopes Changes to this property will trigger replacement. string[]
The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
shieldedInstanceConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
subnetwork Changes to this property will trigger replacement. string
The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
tags Changes to this property will trigger replacement. string[]
The Compute Engine tags to add to all instances (see Manage tags for resources).
zone Changes to this property will trigger replacement. string
The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
internal_ip_only Changes to this property will trigger replacement. bool
If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
metadata Changes to this property will trigger replacement. Mapping[str, str]
The Compute Engine metadata entries to add to all instances (see About VM metadata).
network Changes to this property will trigger replacement. str
The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see Using Subnetworks for more information). A full URL, partial URI, or short name are valid. Examples: * projects/[project_id]/regions/global/default * default
node_group_affinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
Node Group Affinity for sole-tenant clusters.
private_ipv6_google_access Changes to this property will trigger replacement. str
The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
reservation_affinity Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
Reservation Affinity for consuming Zonal reservation.
service_account Changes to this property will trigger replacement. str
The service account used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
service_account_scopes Changes to this property will trigger replacement. Sequence[str]
The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
shielded_instance_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
subnetwork Changes to this property will trigger replacement. str
The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
tags Changes to this property will trigger replacement. Sequence[str]
The Compute Engine tags to add to all instances (see Manage tags for resources).
zone Changes to this property will trigger replacement. str
The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
internalIpOnly Changes to this property will trigger replacement. Boolean
If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
metadata Changes to this property will trigger replacement. Map<String>
The Compute Engine metadata entries to add to all instances (see About VM metadata).
network Changes to this property will trigger replacement. String
The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see Using Subnetworks for more information). A full URL, partial URI, or short name are valid. Examples: * projects/[project_id]/regions/global/default * default
nodeGroupAffinity Changes to this property will trigger replacement. Property Map
Node Group Affinity for sole-tenant clusters.
privateIpv6GoogleAccess Changes to this property will trigger replacement. String
The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
reservationAffinity Changes to this property will trigger replacement. Property Map
Reservation Affinity for consuming Zonal reservation.
serviceAccount Changes to this property will trigger replacement. String
The service account used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
serviceAccountScopes Changes to this property will trigger replacement. List<String>
The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
shieldedInstanceConfig Changes to this property will trigger replacement. Property Map
Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
subnetwork Changes to this property will trigger replacement. String
The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
tags Changes to this property will trigger replacement. List<String>
The Compute Engine tags to add to all instances (see Manage tags for resources).
zone Changes to this property will trigger replacement. String
The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
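To tie several of these fields together, here is a TypeScript sketch assuming the usual @pulumi/gcp input-type layout; the subnetwork, service account, and tag values are placeholders:

import * as gcp from "@pulumi/gcp";

// internalIpOnly requires a subnetwork and off-cluster dependencies that are
// reachable without external IP addresses.
const gceClusterConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfig = {
    zone: "us-central1-a",
    subnetwork: "sub0",
    internalIpOnly: true,
    serviceAccount: "dataproc-worker@my-project.iam.gserviceaccount.com",
    serviceAccountScopes: ["https://www.googleapis.com/auth/cloud-platform"],
    tags: ["allow-internal"],
};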

WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
, WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityArgs

NodeGroup
This property is required.
Changes to this property will trigger replacement.
string
Required. The URI of a sole-tenant node group resource that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1 * node-group-1
NodeGroup
This property is required.
Changes to this property will trigger replacement.
string
Required. The URI of a sole-tenant node group resource that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1 * node-group-1
nodeGroup
This property is required.
Changes to this property will trigger replacement.
String
Required. The URI of a sole-tenant node group resource that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1 * node-group-1
nodeGroup
This property is required.
Changes to this property will trigger replacement.
string
Required. The URI of a sole-tenant node group resource that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1 * node-group-1
node_group
This property is required.
Changes to this property will trigger replacement.
str
Required. The URI of a sole-tenant node group resource that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1 * node-group-1
nodeGroup
This property is required.
Changes to this property will trigger replacement.
String
Required. The URI of a sole-tenant node group resource that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1 * node-group-1
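A small sketch of pinning the cluster to a sole-tenant node group, assuming the usual @pulumi/gcp input-type layout; the node group URI is a placeholder for an existing group:

import * as gcp from "@pulumi/gcp";

const nodeGroupAffinity: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity = {
    // Partial URIs and bare node group names are also accepted.
    nodeGroup: "projects/my-project/zones/us-central1-a/nodeGroups/node-group-1",
};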

WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
, WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityArgs

ConsumeReservationType Changes to this property will trigger replacement. string
Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION
Key Changes to this property will trigger replacement. string
Corresponds to the label key of reservation resource.
Values Changes to this property will trigger replacement. List<string>
Corresponds to the label values of reservation resource.
ConsumeReservationType Changes to this property will trigger replacement. string
Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION
Key Changes to this property will trigger replacement. string
Corresponds to the label key of reservation resource.
Values Changes to this property will trigger replacement. []string
Corresponds to the label values of reservation resource.
consumeReservationType Changes to this property will trigger replacement. String
Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION
key Changes to this property will trigger replacement. String
Corresponds to the label key of reservation resource.
values Changes to this property will trigger replacement. List<String>
Corresponds to the label values of reservation resource.
consumeReservationType Changes to this property will trigger replacement. string
Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION
key Changes to this property will trigger replacement. string
Corresponds to the label key of reservation resource.
values Changes to this property will trigger replacement. string[]
Corresponds to the label values of reservation resource.
consume_reservation_type Changes to this property will trigger replacement. str
Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION
key Changes to this property will trigger replacement. str
Corresponds to the label key of reservation resource.
values Changes to this property will trigger replacement. Sequence[str]
Corresponds to the label values of reservation resource.
consumeReservationType Changes to this property will trigger replacement. String
Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION
key Changes to this property will trigger replacement. String
Corresponds to the label key of reservation resource.
values Changes to this property will trigger replacement. List<String>
Corresponds to the label values of reservation resource.
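A sketch of targeting a specific zonal reservation, assuming the usual @pulumi/gcp input-type layout; the reservation label key and reservation name below are assumptions for illustration only:

import * as gcp from "@pulumi/gcp";

const reservationAffinity: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity = {
    consumeReservationType: "SPECIFIC_RESERVATION",
    key: "compute.googleapis.com/reservation-name", // assumed label key for named reservations
    values: ["my-reservation"],
};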

WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
, WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigArgs

EnableIntegrityMonitoring Changes to this property will trigger replacement. bool
Defines whether instances have Integrity Monitoring enabled.
EnableSecureBoot Changes to this property will trigger replacement. bool
Defines whether instances have Secure Boot enabled.
EnableVtpm Changes to this property will trigger replacement. bool
Defines whether instances have the vTPM enabled.
EnableIntegrityMonitoring Changes to this property will trigger replacement. bool
Defines whether instances have Integrity Monitoring enabled.
EnableSecureBoot Changes to this property will trigger replacement. bool
Defines whether instances have Secure Boot enabled.
EnableVtpm Changes to this property will trigger replacement. bool
Defines whether instances have the vTPM enabled.
enableIntegrityMonitoring Changes to this property will trigger replacement. Boolean
Defines whether instances have Integrity Monitoring enabled.
enableSecureBoot Changes to this property will trigger replacement. Boolean
Defines whether instances have Secure Boot enabled.
enableVtpm Changes to this property will trigger replacement. Boolean
Defines whether instances have the vTPM enabled.
enableIntegrityMonitoring Changes to this property will trigger replacement. boolean
Defines whether instances have Integrity Monitoring enabled.
enableSecureBoot Changes to this property will trigger replacement. boolean
Defines whether instances have Secure Boot enabled.
enableVtpm Changes to this property will trigger replacement. boolean
Defines whether instances have the vTPM enabled.
enable_integrity_monitoring Changes to this property will trigger replacement. bool
Defines whether instances have Integrity Monitoring enabled.
enable_secure_boot Changes to this property will trigger replacement. bool
Defines whether instances have Secure Boot enabled.
enable_vtpm Changes to this property will trigger replacement. bool
Defines whether instances have the vTPM enabled.
enableIntegrityMonitoring Changes to this property will trigger replacement. Boolean
Defines whether instances have Integrity Monitoring enabled.
enableSecureBoot Changes to this property will trigger replacement. Boolean
Defines whether instances have Secure Boot enabled.
enableVtpm Changes to this property will trigger replacement. Boolean
Defines whether instances have the vTPM enabled.
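All three flags are plain booleans; a minimal sketch assuming the usual @pulumi/gcp input-type layout:

import * as gcp from "@pulumi/gcp";

// Requires a cluster image that supports Shielded VM features.
const shieldedInstanceConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig = {
    enableSecureBoot: true,
    enableVtpm: true,
    enableIntegrityMonitoring: true,
};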

WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfig
, WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigArgs

NamespacedGkeDeploymentTarget Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTarget
A target for the deployment.
NamespacedGkeDeploymentTarget Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTarget
A target for the deployment.
namespacedGkeDeploymentTarget Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTarget
A target for the deployment.
namespacedGkeDeploymentTarget Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTarget
A target for the deployment.
namespacedGkeDeploymentTarget Changes to this property will trigger replacement. Property Map
A target for the deployment.

WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTarget
, WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfigNamespacedGkeDeploymentTargetArgs

ClusterNamespace Changes to this property will trigger replacement. string
A namespace within the GKE cluster to deploy into.
TargetGkeCluster Changes to this property will trigger replacement. string
The target GKE cluster to deploy to. Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}'
ClusterNamespace Changes to this property will trigger replacement. string
A namespace within the GKE cluster to deploy into.
TargetGkeCluster Changes to this property will trigger replacement. string
The target GKE cluster to deploy to. Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}'
clusterNamespace Changes to this property will trigger replacement. String
A namespace within the GKE cluster to deploy into.
targetGkeCluster Changes to this property will trigger replacement. String
The target GKE cluster to deploy to. Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}'
clusterNamespace Changes to this property will trigger replacement. string
A namespace within the GKE cluster to deploy into.
targetGkeCluster Changes to this property will trigger replacement. string
The target GKE cluster to deploy to. Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}'
cluster_namespace Changes to this property will trigger replacement. str
A namespace within the GKE cluster to deploy into.
target_gke_cluster Changes to this property will trigger replacement. str
The target GKE cluster to deploy to. Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}'
clusterNamespace Changes to this property will trigger replacement. String
A namespace within the GKE cluster to deploy into.
targetGkeCluster Changes to this property will trigger replacement. String
The target GKE cluster to deploy to. Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}'
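A sketch of pointing the managed cluster at a GKE namespace, assuming the usual @pulumi/gcp input-type layout; the cluster path and namespace are placeholders. Note that gke_cluster_config is mutually exclusive with the Compute Engine-based config blocks:

import * as gcp from "@pulumi/gcp";

const gkeClusterConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigGkeClusterConfig = {
    namespacedGkeDeploymentTarget: {
        targetGkeCluster: "projects/my-project/locations/us-central1/clusters/my-gke-cluster",
        clusterNamespace: "dataproc-workloads",
    },
};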

WorkflowTemplatePlacementManagedClusterConfigInitializationAction
, WorkflowTemplatePlacementManagedClusterConfigInitializationActionArgs

ExecutableFile Changes to this property will trigger replacement. string
Required. Cloud Storage URI of executable file.
ExecutionTimeout Changes to this property will trigger replacement. string
Amount of time the executable has to complete. Default is 10 minutes (see the JSON representation of Duration in the proto3 Language Guide). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed by the end of the timeout period.
ExecutableFile Changes to this property will trigger replacement. string
Required. Cloud Storage URI of executable file.
ExecutionTimeout Changes to this property will trigger replacement. string
Amount of time the executable has to complete. Default is 10 minutes (see the JSON representation of Duration in the proto3 Language Guide). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed by the end of the timeout period.
executableFile Changes to this property will trigger replacement. String
Required. Cloud Storage URI of executable file.
executionTimeout Changes to this property will trigger replacement. String
Amount of time the executable has to complete. Default is 10 minutes (see the JSON representation of Duration in the proto3 Language Guide). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed by the end of the timeout period.
executableFile Changes to this property will trigger replacement. string
Required. Cloud Storage URI of executable file.
executionTimeout Changes to this property will trigger replacement. string
Amount of time the executable has to complete. Default is 10 minutes (see the JSON representation of Duration in the proto3 Language Guide). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed by the end of the timeout period.
executable_file Changes to this property will trigger replacement. str
Required. Cloud Storage URI of executable file.
execution_timeout Changes to this property will trigger replacement. str
Amount of time the executable has to complete. Default is 10 minutes (see the JSON representation of Duration in the proto3 Language Guide). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed by the end of the timeout period.
executableFile Changes to this property will trigger replacement. String
Required. Cloud Storage URI of executable file.
executionTimeout Changes to this property will trigger replacement. String
Amount of time the executable has to complete. Default is 10 minutes (see the JSON representation of Duration in the proto3 Language Guide). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed by the end of the timeout period.
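A sketch of a single initialization action, assuming the usual @pulumi/gcp input-type layout; the Cloud Storage URI is a placeholder:

import * as gcp from "@pulumi/gcp";

// executionTimeout uses the Duration JSON form (seconds with an "s" suffix).
const initializationActions: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigInitializationAction[] = [
    {
        executableFile: "gs://my-bucket/scripts/install-extras.sh",
        executionTimeout: "600s",
    },
];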

WorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
, WorkflowTemplatePlacementManagedClusterConfigLifecycleConfigArgs

AutoDeleteTime Changes to this property will trigger replacement. string
The time when the cluster will be auto-deleted (see the JSON representation of Timestamp in the proto3 Language Guide).
AutoDeleteTtl Changes to this property will trigger replacement. string
The lifetime duration of the cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
IdleDeleteTtl Changes to this property will trigger replacement. string
The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
IdleStartTime string
Output only. The time when the cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see the JSON representation of Timestamp in the proto3 Language Guide).
AutoDeleteTime Changes to this property will trigger replacement. string
The time when the cluster will be auto-deleted (see the JSON representation of Timestamp in the proto3 Language Guide).
AutoDeleteTtl Changes to this property will trigger replacement. string
The lifetime duration of the cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
IdleDeleteTtl Changes to this property will trigger replacement. string
The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
IdleStartTime string
Output only. The time when the cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see the JSON representation of Timestamp in the proto3 Language Guide).
autoDeleteTime Changes to this property will trigger replacement. String
The time when the cluster will be auto-deleted (see the JSON representation of Timestamp in the proto3 Language Guide).
autoDeleteTtl Changes to this property will trigger replacement. String
The lifetime duration of the cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idleDeleteTtl Changes to this property will trigger replacement. String
The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idleStartTime String
Output only. The time when the cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see the JSON representation of Timestamp in the proto3 Language Guide).
autoDeleteTime Changes to this property will trigger replacement. string
The time when the cluster will be auto-deleted (see the JSON representation of Timestamp in the proto3 Language Guide).
autoDeleteTtl Changes to this property will trigger replacement. string
The lifetime duration of the cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idleDeleteTtl Changes to this property will trigger replacement. string
The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idleStartTime string
Output only. The time when the cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see the JSON representation of Timestamp in the proto3 Language Guide).
auto_delete_time Changes to this property will trigger replacement. str
The time when the cluster will be auto-deleted (see the JSON representation of Timestamp in the proto3 Language Guide).
auto_delete_ttl Changes to this property will trigger replacement. str
The lifetime duration of the cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idle_delete_ttl Changes to this property will trigger replacement. str
The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idle_start_time str
Output only. The time when the cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see the JSON representation of Timestamp in the proto3 Language Guide).
autoDeleteTime Changes to this property will trigger replacement. String
The time when the cluster will be auto-deleted (see the JSON representation of Timestamp in the proto3 Language Guide).
autoDeleteTtl Changes to this property will trigger replacement. String
The lifetime duration of the cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idleDeleteTtl Changes to this property will trigger replacement. String
The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see the JSON representation of Duration in the proto3 Language Guide).
idleStartTime String
Output only. The time when the cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see the JSON representation of Timestamp in the proto3 Language Guide).
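A sketch of scheduled cleanup for the managed cluster, assuming the usual @pulumi/gcp input-type layout; idleStartTime is output only and therefore not set here:

import * as gcp from "@pulumi/gcp";

// Durations use the protobuf Duration JSON form.
const lifecycleConfig: gcp.types.input.dataproc.WorkflowTemplatePlacementManagedClusterConfigLifecycleConfig = {
    idleDeleteTtl: "1800s",  // delete after 30 idle minutes (minimum is 5 minutes)
    autoDeleteTtl: "86400s", // delete the cluster after one day regardless
};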

WorkflowTemplatePlacementManagedClusterConfigMasterConfig
, WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs

Accelerators Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerator>
The Compute Engine accelerator configuration for these instances.
DiskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig
Disk option config settings.
Image Changes to this property will trigger replacement. string
The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
InstanceNames List<string>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
IsPreemptible bool
Output only. Specifies that this instance group contains preemptible instances.
MachineType Changes to this property will trigger replacement. string
The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. If you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the machine type resource, for example, n1-standard-2.
ManagedGroupConfigs List<WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
MinCpuPlatform Changes to this property will trigger replacement. string
Specifies the minimum cpu platform for the Instance Group. See Minimum CPU platform.
NumInstances Changes to this property will trigger replacement. int
The number of VM instances in the instance group. For master instance groups, must be set to 1.
Preemptibility Changes to this property will trigger replacement. string
Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
Accelerators Changes to this property will trigger replacement. []WorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerator
The Compute Engine accelerator configuration for these instances.
DiskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig
Disk option config settings.
Image Changes to this property will trigger replacement. string
The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
InstanceNames []string
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
IsPreemptible bool
Output only. Specifies that this instance group contains preemptible instances.
MachineType Changes to this property will trigger replacement. string
The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. If you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the machine type resource, for example, n1-standard-2.
ManagedGroupConfigs []WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
MinCpuPlatform Changes to this property will trigger replacement. string
Specifies the minimum cpu platform for the Instance Group. See Minimum CPU platform.
NumInstances Changes to this property will trigger replacement. int
The number of VM instances in the instance group. For master instance groups, must be set to 1.
Preemptibility Changes to this property will trigger replacement. string
Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerator>
The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig
Disk option config settings.
image Changes to this property will trigger replacement. String
The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames List<String>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible Boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. String
The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs List<WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. String
Specifies the minimum cpu platform for the Instance Group. See Minimum CPU platform.
numInstances Changes to this property will trigger replacement. Integer
The number of VM instances in the instance group. For master instance groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. String
Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerator[]
The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig
Disk option config settings.
image Changes to this property will trigger replacement. string
The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames string[]
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. string
The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig[]
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. string
Specifies the minimum cpu platform for the Instance Group. See Minimum CPU platform.
numInstances Changes to this property will trigger replacement. number
The number of VM instances in the instance group. For master instance groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. string
Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. Sequence[WorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerator]
The Compute Engine accelerator configuration for these instances.
disk_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig
Disk option config settings.
image Changes to this property will trigger replacement. str
The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instance_names Sequence[str]
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
is_preemptible bool
Output only. Specifies that this instance group contains preemptible instances.
machine_type Changes to this property will trigger replacement. str
The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managed_group_configs Sequence[WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig]
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
min_cpu_platform Changes to this property will trigger replacement. str
Specifies the minimum cpu platform for the Instance Group. See Minimum CPU platform.
num_instances Changes to this property will trigger replacement. int
The number of VM instances in the instance group. For master instance groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. str
Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. List<Property Map>
The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. Property Map
Disk option config settings.
image Changes to this property will trigger replacement. String
The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames List<String>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible Boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. String
The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs List<Property Map>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. String
Specifies the minimum cpu platform for the Instance Group. See Minimum CPU platform.
numInstances Changes to this property will trigger replacement. Number
The number of VM instances in the instance group. For master instance groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. String
Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
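As a point of reference, here is a minimal TypeScript sketch of a master group built from the fields above. The project, template, and cluster names are placeholders, and the short machine-type name is used as the auto zone placement note requires; treat it as an illustration rather than a complete template.

import * as gcp from "@pulumi/gcp";

// Illustrative template focusing on the master group fields described above.
const masterExample = new gcp.dataproc.WorkflowTemplate("master-config-example", {
    name: "master-config-example",
    location: "us-central1",
    placement: {
        managedCluster: {
            clusterName: "master-config-cluster",
            config: {
                masterConfig: {
                    numInstances: 1,              // master groups must be set to 1
                    machineType: "n1-standard-4", // short name, per the auto zone placement rule
                    diskConfig: {
                        bootDiskType: "pd-ssd",
                        bootDiskSizeGb: 100,
                    },
                },
            },
        },
    },
    jobs: [{
        stepId: "placeholderJob",
        sparkJob: { mainClass: "SomeClass" },
    }],
});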

WorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerator
, WorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorArgs

AcceleratorCount Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
AcceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
AcceleratorCount Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
AcceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. Integer
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. String
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. number
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
accelerator_count Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
accelerator_type Changes to this property will trigger replacement. str
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. Number
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. String
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
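If the master group should expose GPUs, the accelerator entries above slot into the same config block. A small sketch, assuming an NVIDIA T4 is available in the selected zone; the accelerator type and count are illustrative, and the short name is used for the same auto zone placement reason as machine types.

// Illustrative master group fragment with one GPU attached; "nvidia-tesla-t4"
// is an assumed accelerator type and must exist in the chosen zone.
const masterWithGpu = {
    machineType: "n1-standard-8",
    accelerators: [{
        acceleratorType: "nvidia-tesla-t4", // short name, per the auto zone placement rule
        acceleratorCount: 1,
    }],
};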

WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig
, WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs

BootDiskSizeGb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
BootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
NumLocalSsds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
BootDiskSizeGb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
BootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
NumLocalSsds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. Integer
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. String
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. Integer
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. number
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. number
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
boot_disk_size_gb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
boot_disk_type Changes to this property will trigger replacement. str
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
num_local_ssds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. Number
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. String
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. Number
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
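A short sketch of the disk options above, showing local SSDs taking over HDFS and scratch data while the boot disk keeps only binaries and configuration; the sizes are illustrative.

// Illustrative disk settings: with numLocalSsds > 0, runtime bulk data moves to
// the SSDs and the boot disk holds only basic config and installed binaries.
const diskConfigExample = {
    bootDiskType: "pd-standard",
    bootDiskSizeGb: 500, // documented default size
    numLocalSsds: 2,     // valid range is 0 to 4
};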

WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig
, WorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigArgs

InstanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
InstanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
InstanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
InstanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName String
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName String
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
instance_group_manager_name str
Output only. The name of the Instance Group Manager for this group.
instance_template_name str
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName String
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName String
Output only. The name of the Instance Template used for the Managed Instance Group.
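Because the managed group config is output only, it is read back from a created resource rather than set. A sketch, assuming template is a gcp.dataproc.WorkflowTemplate resource declared elsewhere in the program:

// Read back the output-only Instance Group Manager details for the master group.
export const masterManagedGroups = template.placement.apply(
    placement => placement.managedCluster?.config?.masterConfig?.managedGroupConfigs);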

WorkflowTemplatePlacementManagedClusterConfigMetastoreConfig
, WorkflowTemplatePlacementManagedClusterConfigMetastoreConfigArgs

DataprocMetastoreService
This property is required.
Changes to this property will trigger replacement.
string
Required. Resource name of an existing Dataproc Metastore service. Example: * projects/
DataprocMetastoreService
This property is required.
Changes to this property will trigger replacement.
string
Required. Resource name of an existing Dataproc Metastore service. Example: * projects/
dataprocMetastoreService
This property is required.
Changes to this property will trigger replacement.
String
Required. Resource name of an existing Dataproc Metastore service. Example: * projects/
dataprocMetastoreService
This property is required.
Changes to this property will trigger replacement.
string
Required. Resource name of an existing Dataproc Metastore service. Example: * projects/
dataproc_metastore_service
This property is required.
Changes to this property will trigger replacement.
str
Required. Resource name of an existing Dataproc Metastore service. Example: * projects/
dataprocMetastoreService
This property is required.
Changes to this property will trigger replacement.
String
Required. Resource name of an existing Dataproc Metastore service. Example: * projects/
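To attach an existing Dataproc Metastore service, its full resource name is passed through this block. A hedged sketch; the resource-name format shown and the project, region, and service names are assumptions, so substitute the actual name of your service.

// Illustrative metastore wiring; the service resource name is a placeholder.
const metastoreConfigExample = {
    dataprocMetastoreService:
        "projects/my-project/locations/us-central1/services/my-metastore",
};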

WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
, WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs

Accelerators Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerator>
Optional. The Compute Engine accelerator configuration for these instances.
DiskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
Optional. Disk option config settings.
Image Changes to this property will trigger replacement. string
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
InstanceNames List<string>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
IsPreemptible bool
Output only. Specifies that this instance group contains preemptible instances.
MachineType Changes to this property will trigger replacement. string
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
ManagedGroupConfigs List<WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
MinCpuPlatform Changes to this property will trigger replacement. string
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
NumInstances Changes to this property will trigger replacement. int
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
Preemptibility Changes to this property will trigger replacement. string
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
Accelerators Changes to this property will trigger replacement. []WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerator
Optional. The Compute Engine accelerator configuration for these instances.
DiskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
Optional. Disk option config settings.
Image Changes to this property will trigger replacement. string
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
InstanceNames []string
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
IsPreemptible bool
Output only. Specifies that this instance group contains preemptible instances.
MachineType Changes to this property will trigger replacement. string
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
ManagedGroupConfigs []WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
MinCpuPlatform Changes to this property will trigger replacement. string
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
NumInstances Changes to this property will trigger replacement. int
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
Preemptibility Changes to this property will trigger replacement. string
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerator>
Optional. The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
Optional. Disk option config settings.
image Changes to this property will trigger replacement. String
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames List<String>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible Boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. String
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs List<WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. String
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
numInstances Changes to this property will trigger replacement. Integer
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. String
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerator[]
Optional. The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
Optional. Disk option config settings.
image Changes to this property will trigger replacement. string
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames string[]
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. string
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig[]
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. string
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
numInstances Changes to this property will trigger replacement. number
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. string
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. Sequence[WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerator]
Optional. The Compute Engine accelerator configuration for these instances.
disk_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
Optional. Disk option config settings.
image Changes to this property will trigger replacement. str
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instance_names Sequence[str]
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
is_preemptible bool
Output only. Specifies that this instance group contains preemptible instances.
machine_type Changes to this property will trigger replacement. str
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managed_group_configs Sequence[WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig]
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
min_cpu_platform Changes to this property will trigger replacement. str
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
num_instances Changes to this property will trigger replacement. int
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. str
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. List<Property Map>
Optional. The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. Property Map
Optional. Disk option config settings.
image Changes to this property will trigger replacement. String
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames List<String>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible Boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. String
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs List<Property Map>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. String
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
numInstances Changes to this property will trigger replacement. Number
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. String
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
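A brief TypeScript sketch of a secondary worker group built from the fields above; secondary instances default to PREEMPTIBLE, which is spelled out here only for clarity, and the counts and sizes are illustrative.

// Illustrative secondary worker group; pairs with masterConfig and workerConfig
// inside placement.managedCluster.config.
const secondaryWorkerConfigExample = {
    numInstances: 4,
    preemptibility: "PREEMPTIBLE", // the documented default for secondary instances
    diskConfig: {
        bootDiskSizeGb: 100,
    },
};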

WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerator
, WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorArgs

AcceleratorCount Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
AcceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
AcceleratorCount Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
AcceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. Integer
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. String
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. number
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
accelerator_count Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
accelerator_type Changes to this property will trigger replacement. str
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. Number
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. String
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.

WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
, WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigArgs

BootDiskSizeGb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
BootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
NumLocalSsds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
BootDiskSizeGb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
BootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
NumLocalSsds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. Integer
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. String
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. Integer
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. number
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. number
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
boot_disk_size_gb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
boot_disk_type Changes to this property will trigger replacement. str
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
num_local_ssds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. Number
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. String
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. Number
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.

WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig
, WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigArgs

InstanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
InstanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
InstanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
InstanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName String
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName String
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
instance_group_manager_name str
Output only. The name of the Instance Group Manager for this group.
instance_template_name str
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName String
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName String
Output only. The name of the Instance Template used for the Managed Instance Group.

WorkflowTemplatePlacementManagedClusterConfigSecurityConfig
, WorkflowTemplatePlacementManagedClusterConfigSecurityConfigArgs

KerberosConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig
Kerberos related configuration.
KerberosConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig
Kerberos related configuration.
kerberosConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig
Kerberos related configuration.
kerberosConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig
Kerberos related configuration.
kerberos_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig
Kerberos related configuration.
kerberosConfig Changes to this property will trigger replacement. Property Map
Kerberos related configuration.

WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig
, WorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigArgs

CrossRealmTrustAdminServer Changes to this property will trigger replacement. string
The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
CrossRealmTrustKdc Changes to this property will trigger replacement. string
The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
CrossRealmTrustRealm Changes to this property will trigger replacement. string
The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
CrossRealmTrustSharedPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
EnableKerberos Changes to this property will trigger replacement. bool
Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
KdcDbKey Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
KeyPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
Keystore Changes to this property will trigger replacement. string
The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
KeystorePassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
KmsKey Changes to this property will trigger replacement. string
The uri of the KMS key used to encrypt various sensitive files.
Realm Changes to this property will trigger replacement. string
The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
RootPrincipalPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the root principal password.
TgtLifetimeHours Changes to this property will trigger replacement. int
The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
Truststore Changes to this property will trigger replacement. string
The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
TruststorePassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
CrossRealmTrustAdminServer Changes to this property will trigger replacement. string
The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
CrossRealmTrustKdc Changes to this property will trigger replacement. string
The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
CrossRealmTrustRealm Changes to this property will trigger replacement. string
The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
CrossRealmTrustSharedPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
EnableKerberos Changes to this property will trigger replacement. bool
Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
KdcDbKey Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
KeyPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
Keystore Changes to this property will trigger replacement. string
The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
KeystorePassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
KmsKey Changes to this property will trigger replacement. string
The uri of the KMS key used to encrypt various sensitive files.
Realm Changes to this property will trigger replacement. string
The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
RootPrincipalPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the root principal password.
TgtLifetimeHours Changes to this property will trigger replacement. int
The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
Truststore Changes to this property will trigger replacement. string
The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
TruststorePassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
crossRealmTrustAdminServer Changes to this property will trigger replacement. String
The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
crossRealmTrustKdc Changes to this property will trigger replacement. String
The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
crossRealmTrustRealm Changes to this property will trigger replacement. String
The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
crossRealmTrustSharedPassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
enableKerberos Changes to this property will trigger replacement. Boolean
Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
kdcDbKey Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
keyPassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
keystore Changes to this property will trigger replacement. String
The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
keystorePassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
kmsKey Changes to this property will trigger replacement. String
The uri of the KMS key used to encrypt various sensitive files.
realm Changes to this property will trigger replacement. String
The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
rootPrincipalPassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the root principal password.
tgtLifetimeHours Changes to this property will trigger replacement. Integer
The lifetime of the ticket-granting ticket, in hours. If not specified or set to 0, the default value of 10 is used.
truststore Changes to this property will trigger replacement. String
The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
truststorePassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
crossRealmTrustAdminServer Changes to this property will trigger replacement. string
The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
crossRealmTrustKdc Changes to this property will trigger replacement. string
The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
crossRealmTrustRealm Changes to this property will trigger replacement. string
The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
crossRealmTrustSharedPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
enableKerberos Changes to this property will trigger replacement. boolean
Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
kdcDbKey Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
keyPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
keystore Changes to this property will trigger replacement. string
The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
keystorePassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
kmsKey Changes to this property will trigger replacement. string
The URI of the KMS key used to encrypt various sensitive files.
realm Changes to this property will trigger replacement. string
The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
rootPrincipalPassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the root principal password.
tgtLifetimeHours Changes to this property will trigger replacement. number
The lifetime of the ticket-granting ticket, in hours. If not specified or set to 0, the default value of 10 is used.
truststore Changes to this property will trigger replacement. string
The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
truststorePassword Changes to this property will trigger replacement. string
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
cross_realm_trust_admin_server Changes to this property will trigger replacement. str
The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
cross_realm_trust_kdc Changes to this property will trigger replacement. str
The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
cross_realm_trust_realm Changes to this property will trigger replacement. str
The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
cross_realm_trust_shared_password Changes to this property will trigger replacement. str
The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
enable_kerberos Changes to this property will trigger replacement. bool
Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
kdc_db_key Changes to this property will trigger replacement. str
The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
key_password Changes to this property will trigger replacement. str
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
keystore Changes to this property will trigger replacement. str
The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
keystore_password Changes to this property will trigger replacement. str
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
kms_key Changes to this property will trigger replacement. str
The URI of the KMS key used to encrypt various sensitive files.
realm Changes to this property will trigger replacement. str
The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
root_principal_password Changes to this property will trigger replacement. str
The Cloud Storage URI of a KMS encrypted file containing the root principal password.
tgt_lifetime_hours Changes to this property will trigger replacement. int
The lifetime of the ticket-granting ticket, in hours. If not specified or set to 0, the default value of 10 is used.
truststore Changes to this property will trigger replacement. str
The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
truststore_password Changes to this property will trigger replacement. str
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
crossRealmTrustAdminServer Changes to this property will trigger replacement. String
The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
crossRealmTrustKdc Changes to this property will trigger replacement. String
The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
crossRealmTrustRealm Changes to this property will trigger replacement. String
The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
crossRealmTrustSharedPassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
enableKerberos Changes to this property will trigger replacement. Boolean
Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
kdcDbKey Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
keyPassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
keystore Changes to this property will trigger replacement. String
The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
keystorePassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
kmsKey Changes to this property will trigger replacement. String
The URI of the KMS key used to encrypt various sensitive files.
realm Changes to this property will trigger replacement. String
The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
rootPrincipalPassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the root principal password.
tgtLifetimeHours Changes to this property will trigger replacement. Number
The lifetime of the ticket-granting ticket, in hours. If not specified or set to 0, the default value of 10 is used.
truststore Changes to this property will trigger replacement. String
The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
truststorePassword Changes to this property will trigger replacement. String
The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
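
These Kerberos fields are nested under the managed cluster's security config. The following is a minimal sketch, assuming the usual securityConfig.kerberosConfig nesting under the managed cluster config and a pre-created KMS key plus a KMS-encrypted root-principal password file in Cloud Storage (the key, bucket, and object names below are placeholders, not real resources):

import * as gcp from "@pulumi/gcp";

// Hedged sketch: the KMS key and the encrypted password object are placeholders.
const kerberized = new gcp.dataproc.WorkflowTemplate("kerberized", {
    name: "kerberized-template",
    location: "us-central1",
    placement: {
        managedCluster: {
            clusterName: "kerberized-cluster",
            config: {
                securityConfig: {
                    kerberosConfig: {
                        enableKerberos: true,
                        // KMS key used to decrypt the password files referenced below.
                        kmsKey: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
                        // KMS-encrypted root principal password stored in Cloud Storage.
                        rootPrincipalPassword: "gs://my-secrets/root-password.encrypted",
                        tgtLifetimeHours: 10,
                    },
                },
            },
        },
    },
    jobs: [{
        stepId: "someJob",
        sparkJob: { mainClass: "SomeClass" },
    }],
});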

WorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
, WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs

ImageVersion Changes to this property will trigger replacement. string
The version of software inside the cluster. It must be one of the supported Dataproc Versions, such as "1.2" (including a subminor version, such as "1.2.29"), or the "preview" version. If unspecified, it defaults to the latest Debian version.
OptionalComponents Changes to this property will trigger replacement. List<string>
The set of components to activate on the cluster.
Properties Changes to this property will trigger replacement. Dictionary<string, string>

The properties to set on daemon config files.

Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. The following are supported prefixes and their mappings:

  • capacity-scheduler: capacity-scheduler.xml
  • core: core-site.xml
  • distcp: distcp-default.xml
  • hdfs: hdfs-site.xml
  • hive: hive-site.xml
  • mapred: mapred-site.xml
  • pig: pig.properties
  • spark: spark-defaults.conf
  • yarn: yarn-site.xml

For more information, see Cluster properties.

ImageVersion Changes to this property will trigger replacement. string
The version of software inside the cluster. It must be one of the supported Dataproc Versions, such as "1.2" (including a subminor version, such as "1.2.29"), or the "preview" version. If unspecified, it defaults to the latest Debian version.
OptionalComponents Changes to this property will trigger replacement. []string
The set of components to activate on the cluster.
Properties Changes to this property will trigger replacement. map[string]string

The properties to set on daemon config files.

Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. The following are supported prefixes and their mappings:

  • capacity-scheduler: capacity-scheduler.xml
  • core: core-site.xml
  • distcp: distcp-default.xml
  • hdfs: hdfs-site.xml
  • hive: hive-site.xml
  • mapred: mapred-site.xml
  • pig: pig.properties
  • spark: spark-defaults.conf
  • yarn: yarn-site.xml

For more information, see Cluster properties.

imageVersion Changes to this property will trigger replacement. String
The version of software inside the cluster. It must be one of the supported Dataproc Versions, such as "1.2" (including a subminor version, such as "1.2.29"), or the "preview" version. If unspecified, it defaults to the latest Debian version.
optionalComponents Changes to this property will trigger replacement. List<String>
The set of components to activate on the cluster.
properties Changes to this property will trigger replacement. Map<String,String>

The properties to set on daemon config files.

Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. The following are supported prefixes and their mappings:

  • capacity-scheduler: capacity-scheduler.xml
  • core: core-site.xml
  • distcp: distcp-default.xml
  • hdfs: hdfs-site.xml
  • hive: hive-site.xml
  • mapred: mapred-site.xml
  • pig: pig.properties
  • spark: spark-defaults.conf
  • yarn: yarn-site.xml

For more information, see Cluster properties.

imageVersion Changes to this property will trigger replacement. string
The version of software inside the cluster. It must be one of the supported Dataproc Versions, such as "1.2" (including a subminor version, such as "1.2.29"), or the "preview" version. If unspecified, it defaults to the latest Debian version.
optionalComponents Changes to this property will trigger replacement. string[]
The set of components to activate on the cluster.
properties Changes to this property will trigger replacement. {[key: string]: string}

The properties to set on daemon config files.

Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. The following are supported prefixes and their mappings:

  • capacity-scheduler: capacity-scheduler.xml
  • core: core-site.xml
  • distcp: distcp-default.xml
  • hdfs: hdfs-site.xml
  • hive: hive-site.xml
  • mapred: mapred-site.xml
  • pig: pig.properties
  • spark: spark-defaults.conf
  • yarn: yarn-site.xml

For more information, see Cluster properties.

image_version Changes to this property will trigger replacement. str
The version of software inside the cluster. It must be one of the supported Dataproc Versions, such as "1.2" (including a subminor version, such as "1.2.29"), or the "preview" version. If unspecified, it defaults to the latest Debian version.
optional_components Changes to this property will trigger replacement. Sequence[str]
The set of components to activate on the cluster.
properties Changes to this property will trigger replacement. Mapping[str, str]

The properties to set on daemon config files.

Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. The following are supported prefixes and their mappings:

  • capacity-scheduler: capacity-scheduler.xml
  • core: core-site.xml
  • distcp: distcp-default.xml
  • hdfs: hdfs-site.xml
  • hive: hive-site.xml
  • mapred: mapred-site.xml
  • pig: pig.properties
  • spark: spark-defaults.conf
  • yarn: yarn-site.xml

For more information, see Cluster properties.

imageVersion Changes to this property will trigger replacement. String
The version of software inside the cluster. It must be one of the supported Dataproc Versions, such as "1.2" (including a subminor version, such as "1.2.29"), or the "preview" version. If unspecified, it defaults to the latest Debian version.
optionalComponents Changes to this property will trigger replacement. List<String>
The set of components to activate on the cluster.
properties Changes to this property will trigger replacement. Map<String>

The properties to set on daemon config files.

Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. The following are supported prefixes and their mappings:

  • capacity-scheduler: capacity-scheduler.xml
  • core: core-site.xml
  • distcp: distcp-default.xml
  • hdfs: hdfs-site.xml
  • hive: hive-site.xml
  • mapred: mapred-site.xml
  • pig: pig.properties
  • spark: spark-defaults.conf
  • yarn: yarn-site.xml

For more information, see Cluster properties.
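
As an illustration of the prefix:property mapping above, the following sketch pins an image version, activates an optional component, and overrides one property each in spark-defaults.conf and core-site.xml (the component and property values are illustrative only):

import * as gcp from "@pulumi/gcp";

const tuned = new gcp.dataproc.WorkflowTemplate("tuned", {
    name: "tuned-template",
    location: "us-central1",
    placement: {
        managedCluster: {
            clusterName: "tuned-cluster",
            config: {
                softwareConfig: {
                    imageVersion: "2.0.35-debian10",
                    // Optional components to activate on the cluster.
                    optionalComponents: ["ZEPPELIN"],
                    properties: {
                        // "spark:" keys land in spark-defaults.conf.
                        "spark:spark.executor.memory": "4g",
                        // "core:" keys land in core-site.xml.
                        "core:hadoop.tmp.dir": "/tmp/hadoop",
                    },
                },
            },
        },
    },
    jobs: [{
        stepId: "someJob",
        sparkJob: { mainClass: "SomeClass" },
    }],
});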

WorkflowTemplatePlacementManagedClusterConfigWorkerConfig
, WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs

Accelerators Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerator>
Optional. The Compute Engine accelerator configuration for these instances.
DiskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig
Optional. Disk option config settings.
Image Changes to this property will trigger replacement. string
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
InstanceNames List<string>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
IsPreemptible bool
Output only. Specifies that this instance group contains preemptible instances.
MachineType Changes to this property will trigger replacement. string
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
ManagedGroupConfigs List<WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
MinCpuPlatform Changes to this property will trigger replacement. string
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
NumInstances Changes to this property will trigger replacement. int
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
Preemptibility Changes to this property will trigger replacement. string
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
Accelerators Changes to this property will trigger replacement. []WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerator
Optional. The Compute Engine accelerator configuration for these instances.
DiskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig
Optional. Disk option config settings.
Image Changes to this property will trigger replacement. string
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
InstanceNames []string
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
IsPreemptible bool
Output only. Specifies that this instance group contains preemptible instances.
MachineType Changes to this property will trigger replacement. string
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
ManagedGroupConfigs []WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
MinCpuPlatform Changes to this property will trigger replacement. string
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
NumInstances Changes to this property will trigger replacement. int
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
Preemptibility Changes to this property will trigger replacement. string
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. List<WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerator>
Optional. The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig
Optional. Disk option config settings.
image Changes to this property will trigger replacement. String
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames List<String>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible Boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. String
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs List<WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. String
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
numInstances Changes to this property will trigger replacement. Integer
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. String
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerator[]
Optional. The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig
Optional. Disk option config settings.
image Changes to this property will trigger replacement. string
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames string[]
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. string
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig[]
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. string
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
numInstances Changes to this property will trigger replacement. number
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. string
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. Sequence[WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerator]
Optional. The Compute Engine accelerator configuration for these instances.
disk_config Changes to this property will trigger replacement. WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig
Optional. Disk option config settings.
image Changes to this property will trigger replacement. str
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instance_names Sequence[str]
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
is_preemptible bool
Output only. Specifies that this instance group contains preemptible instances.
machine_type Changes to this property will trigger replacement. str
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managed_group_configs Sequence[WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig]
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
min_cpu_platform Changes to this property will trigger replacement. str
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
num_instances Changes to this property will trigger replacement. int
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. str
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
accelerators Changes to this property will trigger replacement. List<Property Map>
Optional. The Compute Engine accelerator configuration for these instances.
diskConfig Changes to this property will trigger replacement. Property Map
Optional. Disk option config settings.
image Changes to this property will trigger replacement. String
Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id] * projects/[project_id]/global/images/[image-id] * image-id Image family examples. Dataproc will use the most recent image from the family: * https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name] * projects/[project_id]/global/images/family/[custom-image-family-name] If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
instanceNames List<String>
Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
isPreemptible Boolean
Output only. Specifies that this instance group contains preemptible instances.
machineType Changes to this property will trigger replacement. String
Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 * n1-standard-2 Auto Zone Exception: If you are using the Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2.
managedGroupConfigs List<Property Map>
Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
minCpuPlatform Changes to this property will trigger replacement. String
Optional. Specifies the minimum cpu platform for the Instance Group. See Dataproc > Minimum CPU Platform.
numInstances Changes to this property will trigger replacement. Number
Optional. The number of VM instances in the instance group. For HA cluster master_config groups, must be set to 3. For standard cluster master_config groups, must be set to 1.
preemptibility Changes to this property will trigger replacement. String
Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
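
Tying the worker fields together, here is a hedged sketch of a worker group with an explicit machine type, disk config, an accelerator, and a preemptible secondary worker group (machine type, accelerator type, and counts are placeholders; instanceNames, isPreemptible, and managedGroupConfigs are output-only and therefore not set):

import * as gcp from "@pulumi/gcp";

const workers = new gcp.dataproc.WorkflowTemplate("workers", {
    name: "worker-template",
    location: "us-central1",
    placement: {
        managedCluster: {
            clusterName: "worker-cluster",
            config: {
                masterConfig: {
                    numInstances: 1,
                    machineType: "n1-standard-1",
                },
                workerConfig: {
                    numInstances: 3,
                    // Short machine-type name; required form when Auto Zone Placement is used.
                    machineType: "n1-standard-2",
                    diskConfig: {
                        bootDiskType: "pd-standard",
                        bootDiskSizeGb: 100,
                        numLocalSsds: 1,
                    },
                    // Accelerator short name, as required with Auto Zone Placement.
                    accelerators: [{
                        acceleratorType: "nvidia-tesla-k80",
                        acceleratorCount: 1,
                    }],
                },
                secondaryWorkerConfig: {
                    numInstances: 2,
                    // Secondary workers default to PREEMPTIBLE; shown here for clarity.
                    preemptibility: "PREEMPTIBLE",
                },
            },
        },
    },
    jobs: [{
        stepId: "someJob",
        sparkJob: { mainClass: "SomeClass" },
    }],
});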

WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerator
, WorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorArgs

AcceleratorCount Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
AcceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: if you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
AcceleratorCount Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
AcceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: if you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. Integer
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. String
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: if you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. number
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. string
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: if you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
accelerator_count Changes to this property will trigger replacement. int
The number of the accelerator cards of this type exposed to this instance.
accelerator_type Changes to this property will trigger replacement. str
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: if you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
acceleratorCount Changes to this property will trigger replacement. Number
The number of the accelerator cards of this type exposed to this instance.
acceleratorType Changes to this property will trigger replacement. String
Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. Auto Zone Exception: if you are using the Dataproc Auto Zone Placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.

WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig
, WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs

BootDiskSizeGb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
BootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
NumLocalSsds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
BootDiskSizeGb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
BootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
NumLocalSsds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. Integer
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. String
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. Integer
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. number
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. string
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. number
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
boot_disk_size_gb Changes to this property will trigger replacement. int
Size in GB of the boot disk (default is 500GB).
boot_disk_type Changes to this property will trigger replacement. str
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
num_local_ssds Changes to this property will trigger replacement. int
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
bootDiskSizeGb Changes to this property will trigger replacement. Number
Size in GB of the boot disk (default is 500GB).
bootDiskType Changes to this property will trigger replacement. String
Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
numLocalSsds Changes to this property will trigger replacement. Number
Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.

WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig
, WorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigArgs

InstanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
InstanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
InstanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
InstanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName String
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName String
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName string
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName string
Output only. The name of the Instance Template used for the Managed Instance Group.
instance_group_manager_name str
Output only. The name of the Instance Group Manager for this group.
instance_template_name str
Output only. The name of the Instance Template used for the Managed Instance Group.
instanceGroupManagerName String
Output only. The name of the Instance Group Manager for this group.
instanceTemplateName String
Output only. The name of the Instance Template used for the Managed Instance Group.

Import

WorkflowTemplate can be imported using any of these accepted formats:

  • projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}

  • {{project}}/{{location}}/{{name}}

  • {{location}}/{{name}}

When using the pulumi import command, WorkflowTemplate can be imported using one of the formats above. For example:

$ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}
$ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{project}}/{{location}}/{{name}}
$ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{location}}/{{name}}

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Google Cloud (GCP) Classic pulumi/pulumi-gcp
License
Apache-2.0
Notes
This Pulumi package is based on the google-beta Terraform Provider.