Google Cloud v8.23.0 published on Monday, Mar 24, 2025 by Pulumi

gcp.bigquery.DataTransferConfig

Represents a data transfer configuration. A transfer configuration contains all metadata needed to perform a data transfer.

To get more information about DataTransferConfig, see:

  • API documentation: https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs/create
  • Official documentation: https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/

Note: All arguments marked as write-only will not be stored in the state: sensitive_params.secret_access_key_wo. Read more about Write-only Attributes.

Example Usage

Bigquerydatatransfer Config Scheduled Query

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const project = gcp.organizations.getProject({});
const permissions = new gcp.projects.IAMMember("permissions", {
    project: project.then(project => project.projectId),
    role: "roles/iam.serviceAccountTokenCreator",
    member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com`),
});
const myDataset = new gcp.bigquery.Dataset("my_dataset", {
    datasetId: "my_dataset",
    friendlyName: "foo",
    description: "bar",
    location: "asia-northeast1",
}, {
    dependsOn: [permissions],
});
const queryConfig = new gcp.bigquery.DataTransferConfig("query_config", {
    displayName: "my-query",
    location: "asia-northeast1",
    dataSourceId: "scheduled_query",
    schedule: "first sunday of quarter 00:00",
    destinationDatasetId: myDataset.datasetId,
    params: {
        destination_table_name_template: "my_table",
        write_disposition: "WRITE_APPEND",
        query: "SELECT name FROM tabl WHERE x = 'y'",
    },
}, {
    dependsOn: [permissions],
});
import pulumi
import pulumi_gcp as gcp

project = gcp.organizations.get_project()
permissions = gcp.projects.IAMMember("permissions",
    project=project.project_id,
    role="roles/iam.serviceAccountTokenCreator",
    member=f"serviceAccount:service-{project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com")
my_dataset = gcp.bigquery.Dataset("my_dataset",
    dataset_id="my_dataset",
    friendly_name="foo",
    description="bar",
    location="asia-northeast1",
    opts=pulumi.ResourceOptions(depends_on=[permissions]))
query_config = gcp.bigquery.DataTransferConfig("query_config",
    display_name="my-query",
    location="asia-northeast1",
    data_source_id="scheduled_query",
    schedule="first sunday of quarter 00:00",
    destination_dataset_id=my_dataset.dataset_id,
    params={
        "destination_table_name_template": "my_table",
        "write_disposition": "WRITE_APPEND",
        "query": "SELECT name FROM tabl WHERE x = 'y'",
    },
    opts=pulumi.ResourceOptions(depends_on=[permissions]))
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/projects"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
		if err != nil {
			return err
		}
		permissions, err := projects.NewIAMMember(ctx, "permissions", &projects.IAMMemberArgs{
			Project: pulumi.String(project.ProjectId),
			Role:    pulumi.String("roles/iam.serviceAccountTokenCreator"),
			Member:  pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.Number),
		})
		if err != nil {
			return err
		}
		myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("my_dataset"),
			FriendlyName: pulumi.String("foo"),
			Description:  pulumi.String("bar"),
			Location:     pulumi.String("asia-northeast1"),
		}, pulumi.DependsOn([]pulumi.Resource{
			permissions,
		}))
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataTransferConfig(ctx, "query_config", &bigquery.DataTransferConfigArgs{
			DisplayName:          pulumi.String("my-query"),
			Location:             pulumi.String("asia-northeast1"),
			DataSourceId:         pulumi.String("scheduled_query"),
			Schedule:             pulumi.String("first sunday of quarter 00:00"),
			DestinationDatasetId: myDataset.DatasetId,
			Params: pulumi.StringMap{
				"destination_table_name_template": pulumi.String("my_table"),
				"write_disposition":               pulumi.String("WRITE_APPEND"),
				"query":                           pulumi.String("SELECT name FROM tabl WHERE x = 'y'"),
			},
		}, pulumi.DependsOn([]pulumi.Resource{
			permissions,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var project = Gcp.Organizations.GetProject.Invoke();

    var permissions = new Gcp.Projects.IAMMember("permissions", new()
    {
        Project = project.Apply(getProjectResult => getProjectResult.ProjectId),
        Role = "roles/iam.serviceAccountTokenCreator",
        Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com",
    });

    var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
    {
        DatasetId = "my_dataset",
        FriendlyName = "foo",
        Description = "bar",
        Location = "asia-northeast1",
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            permissions,
        },
    });

    var queryConfig = new Gcp.BigQuery.DataTransferConfig("query_config", new()
    {
        DisplayName = "my-query",
        Location = "asia-northeast1",
        DataSourceId = "scheduled_query",
        Schedule = "first sunday of quarter 00:00",
        DestinationDatasetId = myDataset.DatasetId,
        Params = 
        {
            { "destination_table_name_template", "my_table" },
            { "write_disposition", "WRITE_APPEND" },
            { "query", "SELECT name FROM tabl WHERE x = 'y'" },
        },
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            permissions,
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DataTransferConfig;
import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var project = OrganizationsFunctions.getProject();

        var permissions = new IAMMember("permissions", IAMMemberArgs.builder()
            .project(project.applyValue(getProjectResult -> getProjectResult.projectId()))
            .role("roles/iam.serviceAccountTokenCreator")
            .member(String.format("serviceAccount:service-%s@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
            .build());

        var myDataset = new Dataset("myDataset", DatasetArgs.builder()
            .datasetId("my_dataset")
            .friendlyName("foo")
            .description("bar")
            .location("asia-northeast1")
            .build(), CustomResourceOptions.builder()
                .dependsOn(permissions)
                .build());

        var queryConfig = new DataTransferConfig("queryConfig", DataTransferConfigArgs.builder()
            .displayName("my-query")
            .location("asia-northeast1")
            .dataSourceId("scheduled_query")
            .schedule("first sunday of quarter 00:00")
            .destinationDatasetId(myDataset.datasetId())
            .params(Map.ofEntries(
                Map.entry("destination_table_name_template", "my_table"),
                Map.entry("write_disposition", "WRITE_APPEND"),
                Map.entry("query", "SELECT name FROM tabl WHERE x = 'y'")
            ))
            .build(), CustomResourceOptions.builder()
                .dependsOn(permissions)
                .build());

    }
}
resources:
  permissions:
    type: gcp:projects:IAMMember
    properties:
      project: ${project.projectId}
      role: roles/iam.serviceAccountTokenCreator
      member: serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com
  queryConfig:
    type: gcp:bigquery:DataTransferConfig
    name: query_config
    properties:
      displayName: my-query
      location: asia-northeast1
      dataSourceId: scheduled_query
      schedule: first sunday of quarter 00:00
      destinationDatasetId: ${myDataset.datasetId}
      params:
        destination_table_name_template: my_table
        write_disposition: WRITE_APPEND
        query: SELECT name FROM tabl WHERE x = 'y'
    options:
      dependsOn:
        - ${permissions}
  myDataset:
    type: gcp:bigquery:Dataset
    name: my_dataset
    properties:
      datasetId: my_dataset
      friendlyName: foo
      description: bar
      location: asia-northeast1
    options:
      dependsOn:
        - ${permissions}
variables:
  project:
    fn::invoke:
      function: gcp:organizations:getProject
      arguments: {}

Bigquerydatatransfer Config Cmek

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const project = gcp.organizations.getProject({});
const permissions = new gcp.projects.IAMMember("permissions", {
    project: project.then(project => project.projectId),
    role: "roles/iam.serviceAccountTokenCreator",
    member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com`),
});
const myDataset = new gcp.bigquery.Dataset("my_dataset", {
    datasetId: "example_dataset",
    friendlyName: "foo",
    description: "bar",
    location: "asia-northeast1",
}, {
    dependsOn: [permissions],
});
const keyRing = new gcp.kms.KeyRing("key_ring", {
    name: "example-keyring",
    location: "us",
});
const cryptoKey = new gcp.kms.CryptoKey("crypto_key", {
    name: "example-key",
    keyRing: keyRing.id,
});
const queryConfigCmek = new gcp.bigquery.DataTransferConfig("query_config_cmek", {
    displayName: "",
    location: "asia-northeast1",
    dataSourceId: "scheduled_query",
    schedule: "first sunday of quarter 00:00",
    destinationDatasetId: myDataset.datasetId,
    params: {
        destination_table_name_template: "my_table",
        write_disposition: "WRITE_APPEND",
        query: "SELECT name FROM tabl WHERE x = 'y'",
    },
    encryptionConfiguration: {
        kmsKeyName: cryptoKey.id,
    },
}, {
    dependsOn: [permissions],
});
import pulumi
import pulumi_gcp as gcp

project = gcp.organizations.get_project()
permissions = gcp.projects.IAMMember("permissions",
    project=project.project_id,
    role="roles/iam.serviceAccountTokenCreator",
    member=f"serviceAccount:service-{project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com")
my_dataset = gcp.bigquery.Dataset("my_dataset",
    dataset_id="example_dataset",
    friendly_name="foo",
    description="bar",
    location="asia-northeast1",
    opts=pulumi.ResourceOptions(depends_on=[permissions]))
key_ring = gcp.kms.KeyRing("key_ring",
    name="example-keyring",
    location="us")
crypto_key = gcp.kms.CryptoKey("crypto_key",
    name="example-key",
    key_ring=key_ring.id)
query_config_cmek = gcp.bigquery.DataTransferConfig("query_config_cmek",
    display_name="",
    location="asia-northeast1",
    data_source_id="scheduled_query",
    schedule="first sunday of quarter 00:00",
    destination_dataset_id=my_dataset.dataset_id,
    params={
        "destination_table_name_template": "my_table",
        "write_disposition": "WRITE_APPEND",
        "query": "SELECT name FROM tabl WHERE x = 'y'",
    },
    encryption_configuration={
        "kms_key_name": crypto_key.id,
    },
    opts=pulumi.ResourceOptions(depends_on=[permissions]))
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/kms"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/projects"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
		if err != nil {
			return err
		}
		permissions, err := projects.NewIAMMember(ctx, "permissions", &projects.IAMMemberArgs{
			Project: pulumi.String(project.ProjectId),
			Role:    pulumi.String("roles/iam.serviceAccountTokenCreator"),
			Member:  pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.Number),
		})
		if err != nil {
			return err
		}
		myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("foo"),
			Description:  pulumi.String("bar"),
			Location:     pulumi.String("asia-northeast1"),
		}, pulumi.DependsOn([]pulumi.Resource{
			permissions,
		}))
		if err != nil {
			return err
		}
		keyRing, err := kms.NewKeyRing(ctx, "key_ring", &kms.KeyRingArgs{
			Name:     pulumi.String("example-keyring"),
			Location: pulumi.String("us"),
		})
		if err != nil {
			return err
		}
		cryptoKey, err := kms.NewCryptoKey(ctx, "crypto_key", &kms.CryptoKeyArgs{
			Name:    pulumi.String("example-key"),
			KeyRing: keyRing.ID(),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataTransferConfig(ctx, "query_config_cmek", &bigquery.DataTransferConfigArgs{
			DisplayName:          pulumi.String(""),
			Location:             pulumi.String("asia-northeast1"),
			DataSourceId:         pulumi.String("scheduled_query"),
			Schedule:             pulumi.String("first sunday of quarter 00:00"),
			DestinationDatasetId: myDataset.DatasetId,
			Params: pulumi.StringMap{
				"destination_table_name_template": pulumi.String("my_table"),
				"write_disposition":               pulumi.String("WRITE_APPEND"),
				"query":                           pulumi.String("SELECT name FROM tabl WHERE x = 'y'"),
			},
			EncryptionConfiguration: &bigquery.DataTransferConfigEncryptionConfigurationArgs{
				KmsKeyName: cryptoKey.ID(),
			},
		}, pulumi.DependsOn([]pulumi.Resource{
			permissions,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var project = Gcp.Organizations.GetProject.Invoke();

    var permissions = new Gcp.Projects.IAMMember("permissions", new()
    {
        Project = project.Apply(getProjectResult => getProjectResult.ProjectId),
        Role = "roles/iam.serviceAccountTokenCreator",
        Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com",
    });

    var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "foo",
        Description = "bar",
        Location = "asia-northeast1",
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            permissions,
        },
    });

    var keyRing = new Gcp.Kms.KeyRing("key_ring", new()
    {
        Name = "example-keyring",
        Location = "us",
    });

    var cryptoKey = new Gcp.Kms.CryptoKey("crypto_key", new()
    {
        Name = "example-key",
        KeyRing = keyRing.Id,
    });

    var queryConfigCmek = new Gcp.BigQuery.DataTransferConfig("query_config_cmek", new()
    {
        DisplayName = "",
        Location = "asia-northeast1",
        DataSourceId = "scheduled_query",
        Schedule = "first sunday of quarter 00:00",
        DestinationDatasetId = myDataset.DatasetId,
        Params = 
        {
            { "destination_table_name_template", "my_table" },
            { "write_disposition", "WRITE_APPEND" },
            { "query", "SELECT name FROM tabl WHERE x = 'y'" },
        },
        EncryptionConfiguration = new Gcp.BigQuery.Inputs.DataTransferConfigEncryptionConfigurationArgs
        {
            KmsKeyName = cryptoKey.Id,
        },
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            permissions,
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.kms.KeyRing;
import com.pulumi.gcp.kms.KeyRingArgs;
import com.pulumi.gcp.kms.CryptoKey;
import com.pulumi.gcp.kms.CryptoKeyArgs;
import com.pulumi.gcp.bigquery.DataTransferConfig;
import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
import com.pulumi.gcp.bigquery.inputs.DataTransferConfigEncryptionConfigurationArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var project = OrganizationsFunctions.getProject();

        var permissions = new IAMMember("permissions", IAMMemberArgs.builder()
            .project(project.applyValue(getProjectResult -> getProjectResult.projectId()))
            .role("roles/iam.serviceAccountTokenCreator")
            .member(String.format("serviceAccount:service-%s@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
            .build());

        var myDataset = new Dataset("myDataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("foo")
            .description("bar")
            .location("asia-northeast1")
            .build(), CustomResourceOptions.builder()
                .dependsOn(permissions)
                .build());

        var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
            .name("example-keyring")
            .location("us")
            .build());

        var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
            .name("example-key")
            .keyRing(keyRing.id())
            .build());

        var queryConfigCmek = new DataTransferConfig("queryConfigCmek", DataTransferConfigArgs.builder()
            .displayName("")
            .location("asia-northeast1")
            .dataSourceId("scheduled_query")
            .schedule("first sunday of quarter 00:00")
            .destinationDatasetId(myDataset.datasetId())
            .params(Map.ofEntries(
                Map.entry("destination_table_name_template", "my_table"),
                Map.entry("write_disposition", "WRITE_APPEND"),
                Map.entry("query", "SELECT name FROM tabl WHERE x = 'y'")
            ))
            .encryptionConfiguration(DataTransferConfigEncryptionConfigurationArgs.builder()
                .kmsKeyName(cryptoKey.id())
                .build())
            .build(), CustomResourceOptions.builder()
                .dependsOn(permissions)
                .build());

    }
}
resources:
  permissions:
    type: gcp:projects:IAMMember
    properties:
      project: ${project.projectId}
      role: roles/iam.serviceAccountTokenCreator
      member: serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com
  queryConfigCmek:
    type: gcp:bigquery:DataTransferConfig
    name: query_config_cmek
    properties:
      displayName: ""
      location: asia-northeast1
      dataSourceId: scheduled_query
      schedule: first sunday of quarter 00:00
      destinationDatasetId: ${myDataset.datasetId}
      params:
        destination_table_name_template: my_table
        write_disposition: WRITE_APPEND
        query: SELECT name FROM tabl WHERE x = 'y'
      encryptionConfiguration:
        kmsKeyName: ${cryptoKey.id}
    options:
      dependsOn:
        - ${permissions}
  myDataset:
    type: gcp:bigquery:Dataset
    name: my_dataset
    properties:
      datasetId: example_dataset
      friendlyName: foo
      description: bar
      location: asia-northeast1
    options:
      dependsOn:
        - ${permissions}
  cryptoKey:
    type: gcp:kms:CryptoKey
    name: crypto_key
    properties:
      name: example-key
      keyRing: ${keyRing.id}
  keyRing:
    type: gcp:kms:KeyRing
    name: key_ring
    properties:
      name: example-keyring
      location: us
variables:
  project:
    fn::invoke:
      function: gcp:organizations:getProject
      arguments: {}

Bigquerydatatransfer Config Salesforce

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const project = gcp.organizations.getProject({});
const myDataset = new gcp.bigquery.Dataset("my_dataset", {
    datasetId: "my_dataset",
    description: "My dataset",
    location: "asia-northeast1",
});
const salesforceConfig = new gcp.bigquery.DataTransferConfig("salesforce_config", {
    displayName: "my-salesforce-config",
    location: "asia-northeast1",
    dataSourceId: "salesforce",
    schedule: "first sunday of quarter 00:00",
    destinationDatasetId: myDataset.datasetId,
    params: {
        "connector.authentication.oauth.clientId": "client-id",
        "connector.authentication.oauth.clientSecret": "client-secret",
        "connector.authentication.oauth.myDomain": "MyDomainName",
        assets: "[\"asset-a\",\"asset-b\"]",
    },
});
import pulumi
import pulumi_gcp as gcp

project = gcp.organizations.get_project()
my_dataset = gcp.bigquery.Dataset("my_dataset",
    dataset_id="my_dataset",
    description="My dataset",
    location="asia-northeast1")
salesforce_config = gcp.bigquery.DataTransferConfig("salesforce_config",
    display_name="my-salesforce-config",
    location="asia-northeast1",
    data_source_id="salesforce",
    schedule="first sunday of quarter 00:00",
    destination_dataset_id=my_dataset.dataset_id,
    params={
        "connector.authentication.oauth.clientId": "client-id",
        "connector.authentication.oauth.clientSecret": "client-secret",
        "connector.authentication.oauth.myDomain": "MyDomainName",
        "assets": "[\"asset-a\",\"asset-b\"]",
    })
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
		if err != nil {
			return err
		}
		myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
			DatasetId:   pulumi.String("my_dataset"),
			Description: pulumi.String("My dataset"),
			Location:    pulumi.String("asia-northeast1"),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataTransferConfig(ctx, "salesforce_config", &bigquery.DataTransferConfigArgs{
			DisplayName:          pulumi.String("my-salesforce-config"),
			Location:             pulumi.String("asia-northeast1"),
			DataSourceId:         pulumi.String("salesforce"),
			Schedule:             pulumi.String("first sunday of quarter 00:00"),
			DestinationDatasetId: myDataset.DatasetId,
			Params: pulumi.StringMap{
				"connector.authentication.oauth.clientId":     pulumi.String("client-id"),
				"connector.authentication.oauth.clientSecret": pulumi.String("client-secret"),
				"connector.authentication.oauth.myDomain":     pulumi.String("MyDomainName"),
				"assets": pulumi.String("[\"asset-a\",\"asset-b\"]"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var project = Gcp.Organizations.GetProject.Invoke();

    var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
    {
        DatasetId = "my_dataset",
        Description = "My dataset",
        Location = "asia-northeast1",
    });

    var salesforceConfig = new Gcp.BigQuery.DataTransferConfig("salesforce_config", new()
    {
        DisplayName = "my-salesforce-config",
        Location = "asia-northeast1",
        DataSourceId = "salesforce",
        Schedule = "first sunday of quarter 00:00",
        DestinationDatasetId = myDataset.DatasetId,
        Params = 
        {
            { "connector.authentication.oauth.clientId", "client-id" },
            { "connector.authentication.oauth.clientSecret", "client-secret" },
            { "connector.authentication.oauth.myDomain", "MyDomainName" },
            { "assets", "[\"asset-a\",\"asset-b\"]" },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DataTransferConfig;
import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var project = OrganizationsFunctions.getProject();

        var myDataset = new Dataset("myDataset", DatasetArgs.builder()
            .datasetId("my_dataset")
            .description("My dataset")
            .location("asia-northeast1")
            .build());

        var salesforceConfig = new DataTransferConfig("salesforceConfig", DataTransferConfigArgs.builder()
            .displayName("my-salesforce-config")
            .location("asia-northeast1")
            .dataSourceId("salesforce")
            .schedule("first sunday of quarter 00:00")
            .destinationDatasetId(myDataset.datasetId())
            .params(Map.ofEntries(
                Map.entry("connector.authentication.oauth.clientId", "client-id"),
                Map.entry("connector.authentication.oauth.clientSecret", "client-secret"),
                Map.entry("connector.authentication.oauth.myDomain", "MyDomainName"),
                Map.entry("assets", "[\"asset-a\",\"asset-b\"]")
            ))
            .build());

    }
}
resources:
  myDataset:
    type: gcp:bigquery:Dataset
    name: my_dataset
    properties:
      datasetId: my_dataset
      description: My dataset
      location: asia-northeast1
  salesforceConfig:
    type: gcp:bigquery:DataTransferConfig
    name: salesforce_config
    properties:
      displayName: my-salesforce-config
      location: asia-northeast1
      dataSourceId: salesforce
      schedule: first sunday of quarter 00:00
      destinationDatasetId: ${myDataset.datasetId}
      params:
        connector.authentication.oauth.clientId: client-id
        connector.authentication.oauth.clientSecret: client-secret
        connector.authentication.oauth.myDomain: MyDomainName
        assets: '["asset-a","asset-b"]'
variables:
  project:
    fn::invoke:
      function: gcp:organizations:getProject
      arguments: {}

Ephemeral Attributes Reference

The following write-only attributes are supported:

The sensitive_params block supports:

  • secret_access_key_wo - (Optional) The Secret Access Key of the AWS account from which data is transferred. Note: This property is write-only and will not be read from the API.
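
Because the value is write-only, it is sent to the API on create and update but never persisted in Pulumi state. A minimal Python sketch of a transfer that uses it (the amazon_s3 data source id is real, but the data_path and access_key_id parameter names and all values here are illustrative placeholders; verify them against the bq tab of your data source's transfer documentation):

import pulumi
import pulumi_gcp as gcp

s3_transfer = gcp.bigquery.DataTransferConfig("s3_transfer",
    display_name="my-s3-transfer",
    location="US",
    data_source_id="amazon_s3",
    destination_dataset_id="my_dataset",
    params={
        # Non-secret parameters stay in `params` (placeholder names and values).
        "destination_table_name_template": "my_table",
        "data_path": "s3://my-bucket/*.csv",
        "access_key_id": "AKIAEXAMPLE",
        "file_format": "CSV",
    },
    sensitive_params={
        # Write-only: passed to the API but never stored in Pulumi state.
        "secret_access_key_wo": "my-secret-access-key",
    })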

Create DataTransferConfig Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new DataTransferConfig(name: string, args: DataTransferConfigArgs, opts?: CustomResourceOptions);
@overload
def DataTransferConfig(resource_name: str,
                       args: DataTransferConfigArgs,
                       opts: Optional[ResourceOptions] = None)

@overload
def DataTransferConfig(resource_name: str,
                       opts: Optional[ResourceOptions] = None,
                       display_name: Optional[str] = None,
                       data_source_id: Optional[str] = None,
                       params: Optional[Mapping[str, str]] = None,
                       disabled: Optional[bool] = None,
                       data_refresh_window_days: Optional[int] = None,
                       email_preferences: Optional[DataTransferConfigEmailPreferencesArgs] = None,
                       encryption_configuration: Optional[DataTransferConfigEncryptionConfigurationArgs] = None,
                       location: Optional[str] = None,
                       notification_pubsub_topic: Optional[str] = None,
                       destination_dataset_id: Optional[str] = None,
                       project: Optional[str] = None,
                       schedule: Optional[str] = None,
                       schedule_options: Optional[DataTransferConfigScheduleOptionsArgs] = None,
                       sensitive_params: Optional[DataTransferConfigSensitiveParamsArgs] = None,
                       service_account_name: Optional[str] = None)
func NewDataTransferConfig(ctx *Context, name string, args DataTransferConfigArgs, opts ...ResourceOption) (*DataTransferConfig, error)
public DataTransferConfig(string name, DataTransferConfigArgs args, CustomResourceOptions? opts = null)
public DataTransferConfig(String name, DataTransferConfigArgs args)
public DataTransferConfig(String name, DataTransferConfigArgs args, CustomResourceOptions options)
type: gcp:bigquery:DataTransferConfig
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. DataTransferConfigArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. DataTransferConfigArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. DataTransferConfigArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. DataTransferConfigArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. DataTransferConfigArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var dataTransferConfigResource = new Gcp.BigQuery.DataTransferConfig("dataTransferConfigResource", new()
{
    DisplayName = "string",
    DataSourceId = "string",
    Params = 
    {
        { "string", "string" },
    },
    Disabled = false,
    DataRefreshWindowDays = 0,
    EmailPreferences = new Gcp.BigQuery.Inputs.DataTransferConfigEmailPreferencesArgs
    {
        EnableFailureEmail = false,
    },
    EncryptionConfiguration = new Gcp.BigQuery.Inputs.DataTransferConfigEncryptionConfigurationArgs
    {
        KmsKeyName = "string",
    },
    Location = "string",
    NotificationPubsubTopic = "string",
    DestinationDatasetId = "string",
    Project = "string",
    Schedule = "string",
    ScheduleOptions = new Gcp.BigQuery.Inputs.DataTransferConfigScheduleOptionsArgs
    {
        DisableAutoScheduling = false,
        EndTime = "string",
        StartTime = "string",
    },
    SensitiveParams = new Gcp.BigQuery.Inputs.DataTransferConfigSensitiveParamsArgs
    {
        SecretAccessKey = "string",
    },
    ServiceAccountName = "string",
});
example, err := bigquery.NewDataTransferConfig(ctx, "dataTransferConfigResource", &bigquery.DataTransferConfigArgs{
	DisplayName:  pulumi.String("string"),
	DataSourceId: pulumi.String("string"),
	Params: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Disabled:              pulumi.Bool(false),
	DataRefreshWindowDays: pulumi.Int(0),
	EmailPreferences: &bigquery.DataTransferConfigEmailPreferencesArgs{
		EnableFailureEmail: pulumi.Bool(false),
	},
	EncryptionConfiguration: &bigquery.DataTransferConfigEncryptionConfigurationArgs{
		KmsKeyName: pulumi.String("string"),
	},
	Location:                pulumi.String("string"),
	NotificationPubsubTopic: pulumi.String("string"),
	DestinationDatasetId:    pulumi.String("string"),
	Project:                 pulumi.String("string"),
	Schedule:                pulumi.String("string"),
	ScheduleOptions: &bigquery.DataTransferConfigScheduleOptionsArgs{
		DisableAutoScheduling: pulumi.Bool(false),
		EndTime:               pulumi.String("string"),
		StartTime:             pulumi.String("string"),
	},
	SensitiveParams: &bigquery.DataTransferConfigSensitiveParamsArgs{
		SecretAccessKey: pulumi.String("string"),
	},
	ServiceAccountName: pulumi.String("string"),
})
var dataTransferConfigResource = new DataTransferConfig("dataTransferConfigResource", DataTransferConfigArgs.builder()
    .displayName("string")
    .dataSourceId("string")
    .params(Map.of("string", "string"))
    .disabled(false)
    .dataRefreshWindowDays(0)
    .emailPreferences(DataTransferConfigEmailPreferencesArgs.builder()
        .enableFailureEmail(false)
        .build())
    .encryptionConfiguration(DataTransferConfigEncryptionConfigurationArgs.builder()
        .kmsKeyName("string")
        .build())
    .location("string")
    .notificationPubsubTopic("string")
    .destinationDatasetId("string")
    .project("string")
    .schedule("string")
    .scheduleOptions(DataTransferConfigScheduleOptionsArgs.builder()
        .disableAutoScheduling(false)
        .endTime("string")
        .startTime("string")
        .build())
    .sensitiveParams(DataTransferConfigSensitiveParamsArgs.builder()
        .secretAccessKey("string")
        .build())
    .serviceAccountName("string")
    .build());
data_transfer_config_resource = gcp.bigquery.DataTransferConfig("dataTransferConfigResource",
    display_name="string",
    data_source_id="string",
    params={
        "string": "string",
    },
    disabled=False,
    data_refresh_window_days=0,
    email_preferences={
        "enable_failure_email": False,
    },
    encryption_configuration={
        "kms_key_name": "string",
    },
    location="string",
    notification_pubsub_topic="string",
    destination_dataset_id="string",
    project="string",
    schedule="string",
    schedule_options={
        "disable_auto_scheduling": False,
        "end_time": "string",
        "start_time": "string",
    },
    sensitive_params={
        "secret_access_key": "string",
    },
    service_account_name="string")
const dataTransferConfigResource = new gcp.bigquery.DataTransferConfig("dataTransferConfigResource", {
    displayName: "string",
    dataSourceId: "string",
    params: {
        string: "string",
    },
    disabled: false,
    dataRefreshWindowDays: 0,
    emailPreferences: {
        enableFailureEmail: false,
    },
    encryptionConfiguration: {
        kmsKeyName: "string",
    },
    location: "string",
    notificationPubsubTopic: "string",
    destinationDatasetId: "string",
    project: "string",
    schedule: "string",
    scheduleOptions: {
        disableAutoScheduling: false,
        endTime: "string",
        startTime: "string",
    },
    sensitiveParams: {
        secretAccessKey: "string",
    },
    serviceAccountName: "string",
});
type: gcp:bigquery:DataTransferConfig
properties:
    dataRefreshWindowDays: 0
    dataSourceId: string
    destinationDatasetId: string
    disabled: false
    displayName: string
    emailPreferences:
        enableFailureEmail: false
    encryptionConfiguration:
        kmsKeyName: string
    location: string
    notificationPubsubTopic: string
    params:
        string: string
    project: string
    schedule: string
    scheduleOptions:
        disableAutoScheduling: false
        endTime: string
        startTime: string
    sensitiveParams:
        secretAccessKey: string
    serviceAccountName: string

DataTransferConfig Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
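
For example, the email_preferences input can be written either way; a minimal sketch (both forms configure the same thing):

import pulumi_gcp as gcp

# As a typed argument class:
email_prefs = gcp.bigquery.DataTransferConfigEmailPreferencesArgs(enable_failure_email=True)

# As an equivalent dictionary literal:
email_prefs = {"enable_failure_email": True}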

The DataTransferConfig resource accepts the following input properties:

DataSourceId
This property is required.
Changes to this property will trigger replacement.
string
The data source id. Cannot be changed once the transfer config is created.
DisplayName This property is required. string
The user-specified display name for the transfer config.
Params This property is required. Dictionary<string, string>
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you attempt to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


DataRefreshWindowDays int
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
DestinationDatasetId string
The BigQuery target dataset id.
Disabled bool
When set to true, no runs are scheduled for a given transfer.
EmailPreferences DataTransferConfigEmailPreferences
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
EncryptionConfiguration DataTransferConfigEncryptionConfiguration
Represents the encryption configuration for a transfer. Structure is documented below.
Location Changes to this property will trigger replacement. string
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
NotificationPubsubTopic string
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Schedule string
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval time between recurring transfers depends on the data source; refer to the documentation for your data source.
ScheduleOptions DataTransferConfigScheduleOptions
Options customizing the data transfer schedule. Structure is documented below.
SensitiveParams DataTransferConfigSensitiveParams
Parameters are primarily configured through the params field on this resource. This block contains the parameters that hold secrets or passwords so that they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Credentials may not be specified in both locations; doing so will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
ServiceAccountName string
Service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the user calling this API has permission to act as this service account.
DataSourceId
This property is required.
Changes to this property will trigger replacement.
string
The data source id. Cannot be changed once the transfer config is created.
DisplayName This property is required. string
The user-specified display name for the transfer config.
Params This property is required. map[string]string
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you attempt to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


DataRefreshWindowDays int
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
DestinationDatasetId string
The BigQuery target dataset id.
Disabled bool
When set to true, no runs are scheduled for a given transfer.
EmailPreferences DataTransferConfigEmailPreferencesArgs
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
EncryptionConfiguration DataTransferConfigEncryptionConfigurationArgs
Represents the encryption configuration for a transfer. Structure is documented below.
Location Changes to this property will trigger replacement. string
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
NotificationPubsubTopic string
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Schedule string
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval time between recurring transfers depends on the data source; refer to the documentation for your data source.
ScheduleOptions DataTransferConfigScheduleOptionsArgs
Options customizing the data transfer schedule. Structure is documented below.
SensitiveParams DataTransferConfigSensitiveParamsArgs
Parameters are primarily configured through the params field on this resource. This block contains the parameters that hold secrets or passwords so that they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Credentials may not be specified in both locations; doing so will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
ServiceAccountName string
Service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the user calling this API has permission to act as this service account.
dataSourceId
This property is required.
Changes to this property will trigger replacement.
String
The data source id. Cannot be changed once the transfer config is created.
displayName This property is required. String
The user-specified display name for the transfer config.
params This property is required. Map<String,String>
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you attempt to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


dataRefreshWindowDays Integer
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
destinationDatasetId String
The BigQuery target dataset id.
disabled Boolean
When set to true, no runs are scheduled for a given transfer.
emailPreferences DataTransferConfigEmailPreferences
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryptionConfiguration DataTransferConfigEncryptionConfiguration
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. String
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
notificationPubsubTopic String
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule String
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval time between recurring transfers depends on the data source; refer to the documentation for your data source.
scheduleOptions DataTransferConfigScheduleOptions
Options customizing the data transfer schedule. Structure is documented below.
sensitiveParams DataTransferConfigSensitiveParams
Parameters are primarily configured through the params field on this resource. This block contains the parameters that hold secrets or passwords so that they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Credentials may not be specified in both locations; doing so will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
serviceAccountName String
Service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the user calling this API has permission to act as this service account.
dataSourceId
This property is required.
Changes to this property will trigger replacement.
string
The data source id. Cannot be changed once the transfer config is created.
displayName This property is required. string
The user-specified display name for the transfer config.
params This property is required. {[key: string]: string}
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you attempt to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


dataRefreshWindowDays number
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
destinationDatasetId string
The BigQuery target dataset id.
disabled boolean
When set to true, no runs are scheduled for a given transfer.
emailPreferences DataTransferConfigEmailPreferences
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryptionConfiguration DataTransferConfigEncryptionConfiguration
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. string
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
notificationPubsubTopic string
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule string
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval time between recurring transfers depends on the data source; refer to the documentation for your data source.
scheduleOptions DataTransferConfigScheduleOptions
Options customizing the data transfer schedule. Structure is documented below.
sensitiveParams DataTransferConfigSensitiveParams
Parameters are primarily configured through the params field on this resource. This block contains the parameters that hold secrets or passwords so that they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Credentials may not be specified in both locations; doing so will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
serviceAccountName string
Service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the user calling this API has permission to act as this service account.
data_source_id
This property is required.
Changes to this property will trigger replacement.
str
The data source id. Cannot be changed once the transfer config is created.
display_name This property is required. str
The user-specified display name for the transfer config.
params This property is required. Mapping[str, str]
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you attempt to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


data_refresh_window_days int
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
destination_dataset_id str
The BigQuery target dataset id.
disabled bool
When set to true, no runs are scheduled for a given transfer.
email_preferences DataTransferConfigEmailPreferencesArgs
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryption_configuration DataTransferConfigEncryptionConfigurationArgs
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. str
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
notification_pubsub_topic str
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule str
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval time between recurring transfers depends on the data source; refer to the documentation for your data source.
schedule_options DataTransferConfigScheduleOptionsArgs
Options customizing the data transfer schedule. Structure is documented below.
sensitive_params DataTransferConfigSensitiveParamsArgs
Parameters are primarily configured through the params field on this resource. This block contains the parameters that hold secrets or passwords so that they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Credentials may not be specified in both locations; doing so will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
service_account_name str
Service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the user calling this API has permission to act as this service account.
dataSourceId
This property is required.
Changes to this property will trigger replacement.
String
The data source id. Cannot be changed once the transfer config is created.
displayName This property is required. String
The user specified display name for the transfer config.
params This property is required. Map<String>
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you are attempting to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


dataRefreshWindowDays Number
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
destinationDatasetId String
The BigQuery target dataset id.
disabled Boolean
When set to true, no runs are scheduled for a given transfer.
emailPreferences Property Map
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryptionConfiguration Property Map
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. String
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
notificationPubsubTopic String
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule String
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid formats: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval between recurring transfers depends on the data source; refer to the documentation for your data source.
scheduleOptions Property Map
Options customizing the data transfer schedule. Structure is documented below.
sensitiveParams Property Map
Most parameters are configured using the params field on this resource. This block contains the parameters that hold secrets or passwords so they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Specifying credentials in both locations will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
serviceAccountName String
Service account email. If this field is set, the transfer config will be created with this service account's credentials. The user calling this API must have permission to act as this service account.

Outputs

All input properties are implicitly available as output properties. Additionally, the DataTransferConfig resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
Name string
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
Id string
The provider-assigned unique ID for this managed resource.
Name string
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
id String
The provider-assigned unique ID for this managed resource.
name String
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
id string
The provider-assigned unique ID for this managed resource.
name string
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
id str
The provider-assigned unique ID for this managed resource.
name str
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
id String
The provider-assigned unique ID for this managed resource.
name String
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
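
For example, both outputs can be exported as stack outputs (a minimal TypeScript sketch, reusing the queryConfig resource from the example usage above):

// name resolves to projects/{projectId}/locations/{location}/transferConfigs/{configId}.
export const transferConfigName = queryConfig.name;
export const transferConfigId = queryConfig.id;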

Look up Existing DataTransferConfig Resource

Get an existing DataTransferConfig resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DataTransferConfigState, opts?: CustomResourceOptions): DataTransferConfig
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        data_refresh_window_days: Optional[int] = None,
        data_source_id: Optional[str] = None,
        destination_dataset_id: Optional[str] = None,
        disabled: Optional[bool] = None,
        display_name: Optional[str] = None,
        email_preferences: Optional[DataTransferConfigEmailPreferencesArgs] = None,
        encryption_configuration: Optional[DataTransferConfigEncryptionConfigurationArgs] = None,
        location: Optional[str] = None,
        name: Optional[str] = None,
        notification_pubsub_topic: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        project: Optional[str] = None,
        schedule: Optional[str] = None,
        schedule_options: Optional[DataTransferConfigScheduleOptionsArgs] = None,
        sensitive_params: Optional[DataTransferConfigSensitiveParamsArgs] = None,
        service_account_name: Optional[str] = None) -> DataTransferConfig
func GetDataTransferConfig(ctx *Context, name string, id IDInput, state *DataTransferConfigState, opts ...ResourceOption) (*DataTransferConfig, error)
public static DataTransferConfig Get(string name, Input<string> id, DataTransferConfigState? state, CustomResourceOptions? opts = null)
public static DataTransferConfig get(String name, Output<String> id, DataTransferConfigState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:bigquery:DataTransferConfig
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
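
For example, an existing config can be adopted into a program without re-creating it (a hedged TypeScript sketch; the ID below is an illustrative placeholder in the resource-name format described above):

import * as gcp from "@pulumi/gcp";

// Look up an existing transfer config by its resource name.
const existing = gcp.bigquery.DataTransferConfig.get(
    "existing-config",
    "projects/my-project/locations/asia-northeast1/transferConfigs/1234abcd-0000-4000-8000-000000000000",
);

export const existingDisplayName = existing.displayName;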
The following state arguments are supported:
DataRefreshWindowDays int
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
DataSourceId Changes to this property will trigger replacement. string
The data source id. Cannot be changed once the transfer config is created.
DestinationDatasetId string
The BigQuery target dataset id.
Disabled bool
When set to true, no runs are scheduled for a given transfer.
DisplayName string
The user-specified display name for the transfer config.
EmailPreferences DataTransferConfigEmailPreferences
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
EncryptionConfiguration DataTransferConfigEncryptionConfiguration
Represents the encryption configuration for a transfer. Structure is documented below.
Location Changes to this property will trigger replacement. string
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
Name string
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
NotificationPubsubTopic string
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
Params Dictionary<string, string>
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you are attempting to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Schedule string
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid formats: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval between recurring transfers depends on the data source; refer to the documentation for your data source.
ScheduleOptions DataTransferConfigScheduleOptions
Options customizing the data transfer schedule. Structure is documented below.
SensitiveParams DataTransferConfigSensitiveParams
Most parameters are configured using the params field on this resource. This block contains the parameters that hold secrets or passwords so they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Specifying credentials in both locations will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
ServiceAccountName string
Service account email. If this field is set, the transfer config will be created with this service account's credentials. The user calling this API must have permission to act as this service account.
DataRefreshWindowDays int
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
DataSourceId Changes to this property will trigger replacement. string
The data source id. Cannot be changed once the transfer config is created.
DestinationDatasetId string
The BigQuery target dataset id.
Disabled bool
When set to true, no runs are scheduled for a given transfer.
DisplayName string
The user-specified display name for the transfer config.
EmailPreferences DataTransferConfigEmailPreferencesArgs
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
EncryptionConfiguration DataTransferConfigEncryptionConfigurationArgs
Represents the encryption configuration for a transfer. Structure is documented below.
Location Changes to this property will trigger replacement. string
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
Name string
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
NotificationPubsubTopic string
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
Params map[string]string
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you are attempting to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Schedule string
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid formats: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval between recurring transfers depends on the data source; refer to the documentation for your data source.
ScheduleOptions DataTransferConfigScheduleOptionsArgs
Options customizing the data transfer schedule. Structure is documented below.
SensitiveParams DataTransferConfigSensitiveParamsArgs
Most parameters are configured using the params field on this resource. This block contains the parameters that hold secrets or passwords so they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Specifying credentials in both locations will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
ServiceAccountName string
Service account email. If this field is set, the transfer config will be created with this service account's credentials. The user calling this API must have permission to act as this service account.
dataRefreshWindowDays Integer
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
dataSourceId Changes to this property will trigger replacement. String
The data source id. Cannot be changed once the transfer config is created.
destinationDatasetId String
The BigQuery target dataset id.
disabled Boolean
When set to true, no runs are scheduled for a given transfer.
displayName String
The user-specified display name for the transfer config.
emailPreferences DataTransferConfigEmailPreferences
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryptionConfiguration DataTransferConfigEncryptionConfiguration
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. String
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
name String
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
notificationPubsubTopic String
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
params Map<String,String>
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you are attempting to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule String
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid formats: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval between recurring transfers depends on the data source; refer to the documentation for your data source.
scheduleOptions DataTransferConfigScheduleOptions
Options customizing the data transfer schedule. Structure is documented below.
sensitiveParams DataTransferConfigSensitiveParams
Most parameters are configured using the params field on this resource. This block contains the parameters that hold secrets or passwords so they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Specifying credentials in both locations will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
serviceAccountName String
Service account email. If this field is set, the transfer config will be created with this service account's credentials. The user calling this API must have permission to act as this service account.
dataRefreshWindowDays number
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
dataSourceId Changes to this property will trigger replacement. string
The data source id. Cannot be changed once the transfer config is created.
destinationDatasetId string
The BigQuery target dataset id.
disabled boolean
When set to true, no runs are scheduled for a given transfer.
displayName string
The user-specified display name for the transfer config.
emailPreferences DataTransferConfigEmailPreferences
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryptionConfiguration DataTransferConfigEncryptionConfiguration
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. string
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
name string
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
notificationPubsubTopic string
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
params {[key: string]: string}
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you are attempting to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule string
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid formats: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval between recurring transfers depends on the data source; refer to the documentation for your data source.
scheduleOptions DataTransferConfigScheduleOptions
Options customizing the data transfer schedule. Structure is documented below.
sensitiveParams DataTransferConfigSensitiveParams
Most parameters are configured using the params field on this resource. This block contains the parameters that hold secrets or passwords so they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Specifying credentials in both locations will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
serviceAccountName string
Service account email. If this field is set, the transfer config will be created with this service account's credentials. The user calling this API must have permission to act as this service account.
data_refresh_window_days int
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
data_source_id Changes to this property will trigger replacement. str
The data source id. Cannot be changed once the transfer config is created.
destination_dataset_id str
The BigQuery target dataset id.
disabled bool
When set to true, no runs are scheduled for a given transfer.
display_name str
The user-specified display name for the transfer config.
email_preferences DataTransferConfigEmailPreferencesArgs
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryption_configuration DataTransferConfigEncryptionConfigurationArgs
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. str
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
name str
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
notification_pubsub_topic str
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
params Mapping[str, str]
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you are attempting to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule str
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid formats: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval between recurring transfers depends on the data source; refer to the documentation for your data source.
schedule_options DataTransferConfigScheduleOptionsArgs
Options customizing the data transfer schedule. Structure is documented below.
sensitive_params DataTransferConfigSensitiveParamsArgs
Most parameters are configured using the params field on this resource. This block contains the parameters that hold secrets or passwords so they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Specifying credentials in both locations will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
service_account_name str
Service account email. If this field is set, the transfer config will be created with this service account's credentials. The user calling this API must have permission to act as this service account.
dataRefreshWindowDays Number
The number of days to look back to automatically refresh the data. For example, if dataRefreshWindowDays = 10, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value.
dataSourceId Changes to this property will trigger replacement. String
The data source id. Cannot be changed once the transfer config is created.
destinationDatasetId String
The BigQuery target dataset id.
disabled Boolean
When set to true, no runs are scheduled for a given transfer.
displayName String
The user-specified display name for the transfer config.
emailPreferences Property Map
Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. Structure is documented below.
encryptionConfiguration Property Map
Represents the encryption configuration for a transfer. Structure is documented below.
location Changes to this property will trigger replacement. String
The geographic location where the transfer config should reside. Examples: US, EU, asia-northeast1. The default value is US.
name String
The resource name of the transfer config. Transfer config names have the form projects/{projectId}/locations/{location}/transferConfigs/{configId} or projects/{projectId}/transferConfigs/{configId}, where configId is usually a uuid, but this is not required. The name is ignored when creating a transfer config.
notificationPubsubTopic String
Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish.
params Map<String>
Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer' section for each data source. For example, the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq NOTE: If you are attempting to update a parameter that cannot be updated (due to API limitations), force recreation of the resource.


project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
schedule String
Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid formats: 1st,3rd monday of month 15:30, every wed,fri of jan, jun 13:15, and first sunday of quarter 00:00. See more about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval between recurring transfers depends on the data source; refer to the documentation for your data source.
scheduleOptions Property Map
Options customizing the data transfer schedule. Structure is documented below.
sensitiveParams Property Map
Most parameters are configured using the params field on this resource. This block contains the parameters that hold secrets or passwords so they can be marked sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key in the params map in the API request. Specifying credentials in both locations will cause an error. Changing from one location to a different credential configuration requires an apply to update state. Structure is documented below.
serviceAccountName String
Service account email. If this field is set, the transfer config will be created with this service account's credentials. The user calling this API must have permission to act as this service account.

Supporting Types

DataTransferConfigEmailPreferences
, DataTransferConfigEmailPreferencesArgs

EnableFailureEmail This property is required. bool
If true, email notifications will be sent on transfer run failures.
EnableFailureEmail This property is required. bool
If true, email notifications will be sent on transfer run failures.
enableFailureEmail This property is required. Boolean
If true, email notifications will be sent on transfer run failures.
enableFailureEmail This property is required. boolean
If true, email notifications will be sent on transfer run failures.
enable_failure_email This property is required. bool
If true, email notifications will be sent on transfer run failures.
enableFailureEmail This property is required. Boolean
If true, email notifications will be sent on transfer run failures.
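
For example, the block maps to a single flag (an illustrative TypeScript fragment; pass the value as the resource's emailPreferences argument):

import * as gcp from "@pulumi/gcp";

// Email the transfer config owner whenever a transfer run fails.
const emailPreferences: gcp.types.input.bigquery.DataTransferConfigEmailPreferences = {
    enableFailureEmail: true,
};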

DataTransferConfigEncryptionConfiguration
, DataTransferConfigEncryptionConfigurationArgs

KmsKeyName This property is required. string
The name of the KMS key used for encrypting BigQuery data.
KmsKeyName This property is required. string
The name of the KMS key used for encrypting BigQuery data.
kmsKeyName This property is required. String
The name of the KMS key used for encrypting BigQuery data.
kmsKeyName This property is required. string
The name of the KMS key used for encrypting BigQuery data.
kms_key_name This property is required. str
The name of the KMS key used for encrypting BigQuery data.
kmsKeyName This property is required. String
The name of the KMS key used for encrypting BigQuery data.
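
For example (an illustrative TypeScript fragment; the KMS key name is a placeholder following the usual projects/.../cryptoKeys/... format):

import * as gcp from "@pulumi/gcp";

// Encrypt transferred BigQuery data with a customer-managed KMS key (placeholder name).
const encryptionConfiguration: gcp.types.input.bigquery.DataTransferConfigEncryptionConfiguration = {
    kmsKeyName: "projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key",
};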

DataTransferConfigScheduleOptions
, DataTransferConfigScheduleOptionsArgs

DisableAutoScheduling bool
If true, automatic scheduling of data transfer runs for this configuration will be disabled. Runs can be started on an ad-hoc basis using the transferConfigs.startManualRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
EndTime string
Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
StartTime string
Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
DisableAutoScheduling bool
If true, automatic scheduling of data transfer runs for this configuration will be disabled. Runs can be started on an ad-hoc basis using the transferConfigs.startManualRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
EndTime string
Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
StartTime string
Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
disableAutoScheduling Boolean
If true, automatic scheduling of data transfer runs for this configuration will be disabled. Runs can be started on an ad-hoc basis using the transferConfigs.startManualRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
endTime String
Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
startTime String
Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
disableAutoScheduling boolean
If true, automatic scheduling of data transfer runs for this configuration will be disabled. Runs can be started on an ad-hoc basis using the transferConfigs.startManualRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
endTime string
Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
startTime string
Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
disable_auto_scheduling bool
If true, automatic scheduling of data transfer runs for this configuration will be disabled. Runs can be started on an ad-hoc basis using the transferConfigs.startManualRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
end_time str
Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
start_time str
Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
disableAutoScheduling Boolean
If true, automatic scheduling of data transfer runs for this configuration will be disabled. Runs can be started on an ad-hoc basis using the transferConfigs.startManualRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
endTime String
Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
startTime String
Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
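
For example (an illustrative TypeScript fragment; the timestamps are assumed to be RFC 3339 UTC strings):

import * as gcp from "@pulumi/gcp";

// Bound automatic scheduling to a window; manually triggered runs are not limited by these times.
const scheduleOptions: gcp.types.input.bigquery.DataTransferConfigScheduleOptions = {
    startTime: "2025-04-01T00:00:00Z", // first run is scheduled at or after this time
    endTime: "2026-01-01T00:00:00Z",   // no run is scheduled at or after this time
};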

DataTransferConfigSensitiveParams
, DataTransferConfigSensitiveParamsArgs

SecretAccessKey string
The Secret Access Key of the AWS account that data is transferred from.
SecretAccessKey string
The Secret Access Key of the AWS account that data is transferred from.
secretAccessKey String
The Secret Access Key of the AWS account that data is transferred from.
secretAccessKey string
The Secret Access Key of the AWS account that data is transferred from.
secret_access_key str
The Secret Access Key of the AWS account that data is transferred from.
secretAccessKey String
The Secret Access Key of the AWS account that data is transferred from.
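
For example, the secret should come from Pulumi config or another secret store rather than a literal (a hedged TypeScript sketch; the config key awsSecretAccessKey is an assumed name):

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const cfg = new pulumi.Config();

// Sent as the secret_access_key entry of the params map in the API request,
// but marked sensitive and hidden from plan output.
const sensitiveParams: gcp.types.input.bigquery.DataTransferConfigSensitiveParams = {
    secretAccessKey: cfg.requireSecret("awsSecretAccessKey"),
};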

Import

Config can be imported using any of these accepted formats:

  • {{project}}/{{name}}

  • {{project}} {{name}}

  • {{name}}

When using the pulumi import command, Config can be imported using one of the formats above. For example:

$ pulumi import gcp:bigquery/dataTransferConfig:DataTransferConfig default {{project}}/{{name}}
$ pulumi import gcp:bigquery/dataTransferConfig:DataTransferConfig default "{{project}} {{name}}"
$ pulumi import gcp:bigquery/dataTransferConfig:DataTransferConfig default {{name}}

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Google Cloud (GCP) Classic pulumi/pulumi-gcp
License
Apache-2.0
Notes
This Pulumi package is based on the google-beta Terraform Provider.