Alibaba Cloud v3.76.0 published on Tuesday, Apr 8, 2025 by Pulumi

alicloud.gpdb.StreamingJob


Provides a GPDB Streaming Job resource.

A streaming job defines a real-time data ingestion task that loads data from a streaming data source into an AnalyticDB for PostgreSQL (GPDB) instance.

For information about GPDB Streaming Job and how to use it, see What is Streaming Job.

NOTE: Available since v1.231.0.

Example Usage

Basic Usage

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";

const config = new pulumi.Config();
const name = config.get("name") || "terraform-example";
const defaultTXqb15 = new alicloud.vpc.Network("defaultTXqb15", {cidrBlock: "192.168.0.0/16"});
const defaultaSWhbT = new alicloud.vpc.Switch("defaultaSWhbT", {
    vpcId: defaultTXqb15.id,
    zoneId: "cn-beijing-h",
    cidrBlock: "192.168.1.0/24",
});
const defaulth2ghc1 = new alicloud.gpdb.Instance("defaulth2ghc1", {
    instanceSpec: "2C8G",
    description: name,
    segNodeNum: 2,
    segStorageType: "cloud_essd",
    instanceNetworkType: "VPC",
    dbInstanceCategory: "Basic",
    paymentType: "PayAsYouGo",
    sslEnabled: 0,
    engineVersion: "6.0",
    zoneId: "cn-beijing-h",
    vswitchId: defaultaSWhbT.id,
    storageSize: 50,
    masterCu: 4,
    vpcId: defaultTXqb15.id,
    dbInstanceMode: "StorageElastic",
    engine: "gpdb",
});
const default2dUszY = new alicloud.gpdb.StreamingDataService("default2dUszY", {
    serviceName: "example",
    dbInstanceId: defaulth2ghc1.id,
    serviceDescription: "example",
    serviceSpec: "8",
});
const defaultcDQItu = new alicloud.gpdb.StreamingDataSource("defaultcDQItu", {
    dbInstanceId: defaulth2ghc1.id,
    dataSourceName: "example",
    dataSourceConfig: JSON.stringify({
        brokers: "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
        delimiter: "|",
        format: "delimited",
        topic: "ziyuan_example",
    }),
    dataSourceType: "kafka",
    dataSourceDescription: "example",
    serviceId: default2dUszY.serviceId,
});
const _default = new alicloud.gpdb.StreamingJob("default", {
    account: "example_001",
    destSchema: "public",
    mode: "professional",
    jobName: "example-kafka",
    jobDescription: "example-kafka",
    destDatabase: "adb_sampledata_tpch",
    dbInstanceId: defaulth2ghc1.id,
    destTable: "customer",
    dataSourceId: defaultcDQItu.dataSourceId,
    password: "example_001",
    jobConfig: `DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
  INPUT:
    SOURCE:
      BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
      TOPIC: ziyuan_example
      FALLBACK_OFFSET: LATEST
    KEY:
      COLUMNS:
      - NAME: c_custkey
        TYPE: int
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    VALUE:
      COLUMNS:
      - NAME: c_comment
        TYPE: varchar
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    ERROR_LIMIT: 10
  OUTPUT:
    SCHEMA: public
    TABLE: customer
    MODE: MERGE
    MATCH_COLUMNS:
    - c_custkey
    ORDER_COLUMNS:
    - c_custkey
    UPDATE_COLUMNS:
    - c_custkey
    MAPPING:
    - NAME: c_custkey
      EXPRESSION: c_custkey
  COMMIT:
    MAX_ROW: 1000
    MINIMAL_INTERVAL: 1000
    CONSISTENCY: ATLEAST
  POLL:
    BATCHSIZE: 1000
    TIMEOUT: 1000
  PROPERTIES:
    group.id: ziyuan_example_01
`,
});

Python

import pulumi
import json
import pulumi_alicloud as alicloud

config = pulumi.Config()
name = config.get("name")
if name is None:
    name = "terraform-example"
default_t_xqb15 = alicloud.vpc.Network("defaultTXqb15", cidr_block="192.168.0.0/16")
defaulta_s_whb_t = alicloud.vpc.Switch("defaultaSWhbT",
    vpc_id=default_t_xqb15.id,
    zone_id="cn-beijing-h",
    cidr_block="192.168.1.0/24")
defaulth2ghc1 = alicloud.gpdb.Instance("defaulth2ghc1",
    instance_spec="2C8G",
    description=name,
    seg_node_num=2,
    seg_storage_type="cloud_essd",
    instance_network_type="VPC",
    db_instance_category="Basic",
    payment_type="PayAsYouGo",
    ssl_enabled=0,
    engine_version="6.0",
    zone_id="cn-beijing-h",
    vswitch_id=defaulta_s_whb_t.id,
    storage_size=50,
    master_cu=4,
    vpc_id=default_t_xqb15.id,
    db_instance_mode="StorageElastic",
    engine="gpdb")
default2d_usz_y = alicloud.gpdb.StreamingDataService("default2dUszY",
    service_name="example",
    db_instance_id=defaulth2ghc1.id,
    service_description="example",
    service_spec="8")
defaultc_dq_itu = alicloud.gpdb.StreamingDataSource("defaultcDQItu",
    db_instance_id=defaulth2ghc1.id,
    data_source_name="example",
    data_source_config=json.dumps({
        "brokers": "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
        "delimiter": "|",
        "format": "delimited",
        "topic": "ziyuan_example",
    }),
    data_source_type="kafka",
    data_source_description="example",
    service_id=default2d_usz_y.service_id)
default = alicloud.gpdb.StreamingJob("default",
    account="example_001",
    dest_schema="public",
    mode="professional",
    job_name="example-kafka",
    job_description="example-kafka",
    dest_database="adb_sampledata_tpch",
    db_instance_id=defaulth2ghc1.id,
    dest_table="customer",
    data_source_id=defaultc_dq_itu.data_source_id,
    password="example_001",
    job_config="""DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
  INPUT:
    SOURCE:
      BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
      TOPIC: ziyuan_example
      FALLBACK_OFFSET: LATEST
    KEY:
      COLUMNS:
      - NAME: c_custkey
        TYPE: int
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    VALUE:
      COLUMNS:
      - NAME: c_comment
        TYPE: varchar
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    ERROR_LIMIT: 10
  OUTPUT:
    SCHEMA: public
    TABLE: customer
    MODE: MERGE
    MATCH_COLUMNS:
    - c_custkey
    ORDER_COLUMNS:
    - c_custkey
    UPDATE_COLUMNS:
    - c_custkey
    MAPPING:
    - NAME: c_custkey
      EXPRESSION: c_custkey
  COMMIT:
    MAX_ROW: 1000
    MINIMAL_INTERVAL: 1000
    CONSISTENCY: ATLEAST
  POLL:
    BATCHSIZE: 1000
    TIMEOUT: 1000
  PROPERTIES:
    group.id: ziyuan_example_01
""")

Go

package main

import (
	"encoding/json"

	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/gpdb"
	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/vpc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		cfg := config.New(ctx, "")
		name := "terraform-example"
		if param := cfg.Get("name"); param != "" {
			name = param
		}
		defaultTXqb15, err := vpc.NewNetwork(ctx, "defaultTXqb15", &vpc.NetworkArgs{
			CidrBlock: pulumi.String("192.168.0.0/16"),
		})
		if err != nil {
			return err
		}
		defaultaSWhbT, err := vpc.NewSwitch(ctx, "defaultaSWhbT", &vpc.SwitchArgs{
			VpcId:     defaultTXqb15.ID(),
			ZoneId:    pulumi.String("cn-beijing-h"),
			CidrBlock: pulumi.String("192.168.1.0/24"),
		})
		if err != nil {
			return err
		}
		defaulth2ghc1, err := gpdb.NewInstance(ctx, "defaulth2ghc1", &gpdb.InstanceArgs{
			InstanceSpec:        pulumi.String("2C8G"),
			Description:         pulumi.String(name),
			SegNodeNum:          pulumi.Int(2),
			SegStorageType:      pulumi.String("cloud_essd"),
			InstanceNetworkType: pulumi.String("VPC"),
			DbInstanceCategory:  pulumi.String("Basic"),
			PaymentType:         pulumi.String("PayAsYouGo"),
			SslEnabled:          pulumi.Int(0),
			EngineVersion:       pulumi.String("6.0"),
			ZoneId:              pulumi.String("cn-beijing-h"),
			VswitchId:           defaultaSWhbT.ID(),
			StorageSize:         pulumi.Int(50),
			MasterCu:            pulumi.Int(4),
			VpcId:               defaultTXqb15.ID(),
			DbInstanceMode:      pulumi.String("StorageElastic"),
			Engine:              pulumi.String("gpdb"),
		})
		if err != nil {
			return err
		}
		default2dUszY, err := gpdb.NewStreamingDataService(ctx, "default2dUszY", &gpdb.StreamingDataServiceArgs{
			ServiceName:        pulumi.String("example"),
			DbInstanceId:       defaulth2ghc1.ID(),
			ServiceDescription: pulumi.String("example"),
			ServiceSpec:        pulumi.String("8"),
		})
		if err != nil {
			return err
		}
		tmpJSON0, err := json.Marshal(map[string]interface{}{
			"brokers":   "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
			"delimiter": "|",
			"format":    "delimited",
			"topic":     "ziyuan_example",
		})
		if err != nil {
			return err
		}
		json0 := string(tmpJSON0)
		defaultcDQItu, err := gpdb.NewStreamingDataSource(ctx, "defaultcDQItu", &gpdb.StreamingDataSourceArgs{
			DbInstanceId:          defaulth2ghc1.ID(),
			DataSourceName:        pulumi.String("example"),
			DataSourceConfig:      pulumi.String(json0),
			DataSourceType:        pulumi.String("kafka"),
			DataSourceDescription: pulumi.String("example"),
			ServiceId:             default2dUszY.ServiceId,
		})
		if err != nil {
			return err
		}
		_, err = gpdb.NewStreamingJob(ctx, "default", &gpdb.StreamingJobArgs{
			Account:        pulumi.String("example_001"),
			DestSchema:     pulumi.String("public"),
			Mode:           pulumi.String("professional"),
			JobName:        pulumi.String("example-kafka"),
			JobDescription: pulumi.String("example-kafka"),
			DestDatabase:   pulumi.String("adb_sampledata_tpch"),
			DbInstanceId:   defaulth2ghc1.ID(),
			DestTable:      pulumi.String("customer"),
			DataSourceId:   defaultcDQItu.DataSourceId,
			Password:       pulumi.String("example_001"),
			JobConfig: pulumi.String(`DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
  INPUT:
    SOURCE:
      BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
      TOPIC: ziyuan_example
      FALLBACK_OFFSET: LATEST
    KEY:
      COLUMNS:
      - NAME: c_custkey
        TYPE: int
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    VALUE:
      COLUMNS:
      - NAME: c_comment
        TYPE: varchar
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    ERROR_LIMIT: 10
  OUTPUT:
    SCHEMA: public
    TABLE: customer
    MODE: MERGE
    MATCH_COLUMNS:
    - c_custkey
    ORDER_COLUMNS:
    - c_custkey
    UPDATE_COLUMNS:
    - c_custkey
    MAPPING:
    - NAME: c_custkey
      EXPRESSION: c_custkey
  COMMIT:
    MAX_ROW: 1000
    MINIMAL_INTERVAL: 1000
    CONSISTENCY: ATLEAST
  POLL:
    BATCHSIZE: 1000
    TIMEOUT: 1000
  PROPERTIES:
    group.id: ziyuan_example_01
`),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using AliCloud = Pulumi.AliCloud;

return await Deployment.RunAsync(() => 
{
    var config = new Config();
    var name = config.Get("name") ?? "terraform-example";
    var defaultTXqb15 = new AliCloud.Vpc.Network("defaultTXqb15", new()
    {
        CidrBlock = "192.168.0.0/16",
    });

    var defaultaSWhbT = new AliCloud.Vpc.Switch("defaultaSWhbT", new()
    {
        VpcId = defaultTXqb15.Id,
        ZoneId = "cn-beijing-h",
        CidrBlock = "192.168.1.0/24",
    });

    var defaulth2ghc1 = new AliCloud.Gpdb.Instance("defaulth2ghc1", new()
    {
        InstanceSpec = "2C8G",
        Description = name,
        SegNodeNum = 2,
        SegStorageType = "cloud_essd",
        InstanceNetworkType = "VPC",
        DbInstanceCategory = "Basic",
        PaymentType = "PayAsYouGo",
        SslEnabled = 0,
        EngineVersion = "6.0",
        ZoneId = "cn-beijing-h",
        VswitchId = defaultaSWhbT.Id,
        StorageSize = 50,
        MasterCu = 4,
        VpcId = defaultTXqb15.Id,
        DbInstanceMode = "StorageElastic",
        Engine = "gpdb",
    });

    var default2dUszY = new AliCloud.Gpdb.StreamingDataService("default2dUszY", new()
    {
        ServiceName = "example",
        DbInstanceId = defaulth2ghc1.Id,
        ServiceDescription = "example",
        ServiceSpec = "8",
    });

    var defaultcDQItu = new AliCloud.Gpdb.StreamingDataSource("defaultcDQItu", new()
    {
        DbInstanceId = defaulth2ghc1.Id,
        DataSourceName = "example",
        DataSourceConfig = JsonSerializer.Serialize(new Dictionary<string, object?>
        {
            ["brokers"] = "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
            ["delimiter"] = "|",
            ["format"] = "delimited",
            ["topic"] = "ziyuan_example",
        }),
        DataSourceType = "kafka",
        DataSourceDescription = "example",
        ServiceId = default2dUszY.ServiceId,
    });

    var @default = new AliCloud.Gpdb.StreamingJob("default", new()
    {
        Account = "example_001",
        DestSchema = "public",
        Mode = "professional",
        JobName = "example-kafka",
        JobDescription = "example-kafka",
        DestDatabase = "adb_sampledata_tpch",
        DbInstanceId = defaulth2ghc1.Id,
        DestTable = "customer",
        DataSourceId = defaultcDQItu.DataSourceId,
        Password = "example_001",
        JobConfig = @"DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
  INPUT:
    SOURCE:
      BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
      TOPIC: ziyuan_example
      FALLBACK_OFFSET: LATEST
    KEY:
      COLUMNS:
      - NAME: c_custkey
        TYPE: int
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    VALUE:
      COLUMNS:
      - NAME: c_comment
        TYPE: varchar
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    ERROR_LIMIT: 10
  OUTPUT:
    SCHEMA: public
    TABLE: customer
    MODE: MERGE
    MATCH_COLUMNS:
    - c_custkey
    ORDER_COLUMNS:
    - c_custkey
    UPDATE_COLUMNS:
    - c_custkey
    MAPPING:
    - NAME: c_custkey
      EXPRESSION: c_custkey
  COMMIT:
    MAX_ROW: 1000
    MINIMAL_INTERVAL: 1000
    CONSISTENCY: ATLEAST
  POLL:
    BATCHSIZE: 1000
    TIMEOUT: 1000
  PROPERTIES:
    group.id: ziyuan_example_01
",
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.alicloud.vpc.Network;
import com.pulumi.alicloud.vpc.NetworkArgs;
import com.pulumi.alicloud.vpc.Switch;
import com.pulumi.alicloud.vpc.SwitchArgs;
import com.pulumi.alicloud.gpdb.Instance;
import com.pulumi.alicloud.gpdb.InstanceArgs;
import com.pulumi.alicloud.gpdb.StreamingDataService;
import com.pulumi.alicloud.gpdb.StreamingDataServiceArgs;
import com.pulumi.alicloud.gpdb.StreamingDataSource;
import com.pulumi.alicloud.gpdb.StreamingDataSourceArgs;
import com.pulumi.alicloud.gpdb.StreamingJob;
import com.pulumi.alicloud.gpdb.StreamingJobArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var config = ctx.config();
        final var name = config.get("name").orElse("terraform-example");
        var defaultTXqb15 = new Network("defaultTXqb15", NetworkArgs.builder()
            .cidrBlock("192.168.0.0/16")
            .build());

        var defaultaSWhbT = new Switch("defaultaSWhbT", SwitchArgs.builder()
            .vpcId(defaultTXqb15.id())
            .zoneId("cn-beijing-h")
            .cidrBlock("192.168.1.0/24")
            .build());

        var defaulth2ghc1 = new Instance("defaulth2ghc1", InstanceArgs.builder()
            .instanceSpec("2C8G")
            .description(name)
            .segNodeNum("2")
            .segStorageType("cloud_essd")
            .instanceNetworkType("VPC")
            .dbInstanceCategory("Basic")
            .paymentType("PayAsYouGo")
            .sslEnabled("0")
            .engineVersion("6.0")
            .zoneId("cn-beijing-h")
            .vswitchId(defaultaSWhbT.id())
            .storageSize("50")
            .masterCu("4")
            .vpcId(defaultTXqb15.id())
            .dbInstanceMode("StorageElastic")
            .engine("gpdb")
            .build());

        var default2dUszY = new StreamingDataService("default2dUszY", StreamingDataServiceArgs.builder()
            .serviceName("example")
            .dbInstanceId(defaulth2ghc1.id())
            .serviceDescription("example")
            .serviceSpec("8")
            .build());

        var defaultcDQItu = new StreamingDataSource("defaultcDQItu", StreamingDataSourceArgs.builder()
            .dbInstanceId(defaulth2ghc1.id())
            .dataSourceName("example")
            .dataSourceConfig(serializeJson(
                jsonObject(
                    jsonProperty("brokers", "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092"),
                    jsonProperty("delimiter", "|"),
                    jsonProperty("format", "delimited"),
                    jsonProperty("topic", "ziyuan_example")
                )))
            .dataSourceType("kafka")
            .dataSourceDescription("example")
            .serviceId(default2dUszY.serviceId())
            .build());

        var default_ = new StreamingJob("default", StreamingJobArgs.builder()
            .account("example_001")
            .destSchema("public")
            .mode("professional")
            .jobName("example-kafka")
            .jobDescription("example-kafka")
            .destDatabase("adb_sampledata_tpch")
            .dbInstanceId(defaulth2ghc1.id())
            .destTable("customer")
            .dataSourceId(defaultcDQItu.dataSourceId())
            .password("example_001")
            .jobConfig("""
DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
  INPUT:
    SOURCE:
      BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
      TOPIC: ziyuan_example
      FALLBACK_OFFSET: LATEST
    KEY:
      COLUMNS:
      - NAME: c_custkey
        TYPE: int
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    VALUE:
      COLUMNS:
      - NAME: c_comment
        TYPE: varchar
      FORMAT: delimited
      DELIMITED_OPTION:
        DELIMITER: '|'
    ERROR_LIMIT: 10
  OUTPUT:
    SCHEMA: public
    TABLE: customer
    MODE: MERGE
    MATCH_COLUMNS:
    - c_custkey
    ORDER_COLUMNS:
    - c_custkey
    UPDATE_COLUMNS:
    - c_custkey
    MAPPING:
    - NAME: c_custkey
      EXPRESSION: c_custkey
  COMMIT:
    MAX_ROW: 1000
    MINIMAL_INTERVAL: 1000
    CONSISTENCY: ATLEAST
  POLL:
    BATCHSIZE: 1000
    TIMEOUT: 1000
  PROPERTIES:
    group.id: ziyuan_example_01
            """)
            .build());

    }
}

YAML

configuration:
  name:
    type: string
    default: terraform-example
resources:
  defaultTXqb15:
    type: alicloud:vpc:Network
    properties:
      cidrBlock: 192.168.0.0/16
  defaultaSWhbT:
    type: alicloud:vpc:Switch
    properties:
      vpcId: ${defaultTXqb15.id}
      zoneId: cn-beijing-h
      cidrBlock: 192.168.1.0/24
  defaulth2ghc1:
    type: alicloud:gpdb:Instance
    properties:
      instanceSpec: 2C8G
      description: ${name}
      segNodeNum: '2'
      segStorageType: cloud_essd
      instanceNetworkType: VPC
      dbInstanceCategory: Basic
      paymentType: PayAsYouGo
      sslEnabled: '0'
      engineVersion: '6.0'
      zoneId: cn-beijing-h
      vswitchId: ${defaultaSWhbT.id}
      storageSize: '50'
      masterCu: '4'
      vpcId: ${defaultTXqb15.id}
      dbInstanceMode: StorageElastic
      engine: gpdb
  default2dUszY:
    type: alicloud:gpdb:StreamingDataService
    properties:
      serviceName: example
      dbInstanceId: ${defaulth2ghc1.id}
      serviceDescription: example
      serviceSpec: '8'
  defaultcDQItu:
    type: alicloud:gpdb:StreamingDataSource
    properties:
      dbInstanceId: ${defaulth2ghc1.id}
      dataSourceName: example
      dataSourceConfig:
        fn::toJSON:
          brokers: alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092
          delimiter: '|'
          format: delimited
          topic: ziyuan_example
      dataSourceType: kafka
      dataSourceDescription: example
      serviceId: ${default2dUszY.serviceId}
  default:
    type: alicloud:gpdb:StreamingJob
    properties:
      account: example_001
      destSchema: public
      mode: professional
      jobName: example-kafka
      jobDescription: example-kafka
      destDatabase: adb_sampledata_tpch
      dbInstanceId: ${defaulth2ghc1.id}
      destTable: customer
      dataSourceId: ${defaultcDQItu.dataSourceId}
      password: example_001
      jobConfig: |
        DATABASE: adb_sampledata_tpch
        USER: example_001
        PASSWORD: example_001
        HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
        PORT: 5432
        KAFKA:
          INPUT:
            SOURCE:
              BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
              TOPIC: ziyuan_example
              FALLBACK_OFFSET: LATEST
            KEY:
              COLUMNS:
              - NAME: c_custkey
                TYPE: int
              FORMAT: delimited
              DELIMITED_OPTION:
                DELIMITER: '|'
            VALUE:
              COLUMNS:
              - NAME: c_comment
                TYPE: varchar
              FORMAT: delimited
              DELIMITED_OPTION:
                DELIMITER: '|'
            ERROR_LIMIT: 10
          OUTPUT:
            SCHEMA: public
            TABLE: customer
            MODE: MERGE
            MATCH_COLUMNS:
            - c_custkey
            ORDER_COLUMNS:
            - c_custkey
            UPDATE_COLUMNS:
            - c_custkey
            MAPPING:
            - NAME: c_custkey
              EXPRESSION: c_custkey
          COMMIT:
            MAX_ROW: 1000
            MINIMAL_INTERVAL: 1000
            CONSISTENCY: ATLEAST
          POLL:
            BATCHSIZE: 1000
            TIMEOUT: 1000
          PROPERTIES:
            group.id: ziyuan_example_01        
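
The example above configures the job in professional mode, where the full Kafka-to-table mapping is supplied as a YAML jobConfig. In basic mode, the mapping is expressed through the column and write-mode properties instead (see the property reference below). The following TypeScript sketch illustrates the idea, reusing the instance and data source from the TypeScript example above; the column lists and other values are placeholders under that assumption, not a verified configuration:

const basicJob = new alicloud.gpdb.StreamingJob("basic", {
    dbInstanceId: defaulth2ghc1.id,
    dataSourceId: defaultcDQItu.dataSourceId,
    jobName: "example-kafka-basic",
    jobDescription: "basic-mode example",
    mode: "basic",
    account: "example_001",
    password: "example_001",
    destDatabase: "adb_sampledata_tpch",
    destSchema: "public",
    destTable: "customer",
    // In basic mode the column mapping is passed directly instead of via jobConfig (placeholder columns).
    srcColumns: ["c_custkey", "c_comment"],
    destColumns: ["c_custkey", "c_comment"],
    matchColumns: ["c_custkey"],
    updateColumns: ["c_comment"],
    writeMode: "merge",
    consistency: "ATLEAST",
    errorLimitCount: 10,
    fallbackOffset: "LATEST",
    groupName: "ziyuan_example_01",
    tryRun: false,
});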

Create StreamingJob Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new StreamingJob(name: string, args: StreamingJobArgs, opts?: CustomResourceOptions);
@overload
def StreamingJob(resource_name: str,
                 args: StreamingJobArgs,
                 opts: Optional[ResourceOptions] = None)

@overload
def StreamingJob(resource_name: str,
                 opts: Optional[ResourceOptions] = None,
                 db_instance_id: Optional[str] = None,
                 job_name: Optional[str] = None,
                 data_source_id: Optional[str] = None,
                 group_name: Optional[str] = None,
                 job_description: Optional[str] = None,
                 dest_database: Optional[str] = None,
                 dest_schema: Optional[str] = None,
                 dest_table: Optional[str] = None,
                 error_limit_count: Optional[int] = None,
                 fallback_offset: Optional[str] = None,
                 account: Optional[str] = None,
                 job_config: Optional[str] = None,
                 dest_columns: Optional[Sequence[str]] = None,
                 consistency: Optional[str] = None,
                 match_columns: Optional[Sequence[str]] = None,
                 mode: Optional[str] = None,
                 password: Optional[str] = None,
                 src_columns: Optional[Sequence[str]] = None,
                 try_run: Optional[bool] = None,
                 update_columns: Optional[Sequence[str]] = None,
                 write_mode: Optional[str] = None)
func NewStreamingJob(ctx *Context, name string, args StreamingJobArgs, opts ...ResourceOption) (*StreamingJob, error)
public StreamingJob(string name, StreamingJobArgs args, CustomResourceOptions? opts = null)
public StreamingJob(String name, StreamingJobArgs args)
public StreamingJob(String name, StreamingJobArgs args, CustomResourceOptions options)
type: alicloud:gpdb:StreamingJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. StreamingJobArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. StreamingJobArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. StreamingJobArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. StreamingJobArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. StreamingJobArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

C#

var streamingJobResource = new AliCloud.Gpdb.StreamingJob("streamingJobResource", new()
{
    DbInstanceId = "string",
    JobName = "string",
    DataSourceId = "string",
    GroupName = "string",
    JobDescription = "string",
    DestDatabase = "string",
    DestSchema = "string",
    DestTable = "string",
    ErrorLimitCount = 0,
    FallbackOffset = "string",
    Account = "string",
    JobConfig = "string",
    DestColumns = new[]
    {
        "string",
    },
    Consistency = "string",
    MatchColumns = new[]
    {
        "string",
    },
    Mode = "string",
    Password = "string",
    SrcColumns = new[]
    {
        "string",
    },
    TryRun = false,
    UpdateColumns = new[]
    {
        "string",
    },
    WriteMode = "string",
});

Go

example, err := gpdb.NewStreamingJob(ctx, "streamingJobResource", &gpdb.StreamingJobArgs{
	DbInstanceId:    pulumi.String("string"),
	JobName:         pulumi.String("string"),
	DataSourceId:    pulumi.String("string"),
	GroupName:       pulumi.String("string"),
	JobDescription:  pulumi.String("string"),
	DestDatabase:    pulumi.String("string"),
	DestSchema:      pulumi.String("string"),
	DestTable:       pulumi.String("string"),
	ErrorLimitCount: pulumi.Int(0),
	FallbackOffset:  pulumi.String("string"),
	Account:         pulumi.String("string"),
	JobConfig:       pulumi.String("string"),
	DestColumns: pulumi.StringArray{
		pulumi.String("string"),
	},
	Consistency: pulumi.String("string"),
	MatchColumns: pulumi.StringArray{
		pulumi.String("string"),
	},
	Mode:     pulumi.String("string"),
	Password: pulumi.String("string"),
	SrcColumns: pulumi.StringArray{
		pulumi.String("string"),
	},
	TryRun: pulumi.Bool(false),
	UpdateColumns: pulumi.StringArray{
		pulumi.String("string"),
	},
	WriteMode: pulumi.String("string"),
})

Java

var streamingJobResource = new StreamingJob("streamingJobResource", StreamingJobArgs.builder()
    .dbInstanceId("string")
    .jobName("string")
    .dataSourceId("string")
    .groupName("string")
    .jobDescription("string")
    .destDatabase("string")
    .destSchema("string")
    .destTable("string")
    .errorLimitCount(0)
    .fallbackOffset("string")
    .account("string")
    .jobConfig("string")
    .destColumns("string")
    .consistency("string")
    .matchColumns("string")
    .mode("string")
    .password("string")
    .srcColumns("string")
    .tryRun(false)
    .updateColumns("string")
    .writeMode("string")
    .build());

Python

streaming_job_resource = alicloud.gpdb.StreamingJob("streamingJobResource",
    db_instance_id="string",
    job_name="string",
    data_source_id="string",
    group_name="string",
    job_description="string",
    dest_database="string",
    dest_schema="string",
    dest_table="string",
    error_limit_count=0,
    fallback_offset="string",
    account="string",
    job_config="string",
    dest_columns=["string"],
    consistency="string",
    match_columns=["string"],
    mode="string",
    password="string",
    src_columns=["string"],
    try_run=False,
    update_columns=["string"],
    write_mode="string")

TypeScript

const streamingJobResource = new alicloud.gpdb.StreamingJob("streamingJobResource", {
    dbInstanceId: "string",
    jobName: "string",
    dataSourceId: "string",
    groupName: "string",
    jobDescription: "string",
    destDatabase: "string",
    destSchema: "string",
    destTable: "string",
    errorLimitCount: 0,
    fallbackOffset: "string",
    account: "string",
    jobConfig: "string",
    destColumns: ["string"],
    consistency: "string",
    matchColumns: ["string"],
    mode: "string",
    password: "string",
    srcColumns: ["string"],
    tryRun: false,
    updateColumns: ["string"],
    writeMode: "string",
});

YAML

type: alicloud:gpdb:StreamingJob
properties:
    account: string
    consistency: string
    dataSourceId: string
    dbInstanceId: string
    destColumns:
        - string
    destDatabase: string
    destSchema: string
    destTable: string
    errorLimitCount: 0
    fallbackOffset: string
    groupName: string
    jobConfig: string
    jobDescription: string
    jobName: string
    matchColumns:
        - string
    mode: string
    password: string
    srcColumns:
        - string
    tryRun: false
    updateColumns:
        - string
    writeMode: string

StreamingJob Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The StreamingJob resource accepts the following input properties:

DataSourceId
This property is required.
Changes to this property will trigger replacement.
string
The data source ID.
DbInstanceId
This property is required.
Changes to this property will trigger replacement.
string
The instance ID.
JobName
This property is required.
Changes to this property will trigger replacement.
string
The name of the job.
Account string
The name of the database account.
Consistency string

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
DestColumns List<string>
Target Field
DestDatabase string
The name of the destination database.
DestSchema string
Target Schema
DestTable string
The name of the destination table.
ErrorLimitCount int
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
FallbackOffset string
Automatic offset reset
GroupName string
Group Name
JobConfig string
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
JobDescription string
The description of the job.
MatchColumns List<string>
Match Field
Mode Changes to this property will trigger replacement. string

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

Password string
The password of the database account.
SrcColumns List<string>
Source Field
TryRun bool

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

UpdateColumns List<string>
Update Field
WriteMode string

The write mode.

Valid values:

  • insert
  • update
  • merge
DataSourceId
This property is required.
Changes to this property will trigger replacement.
string
The data source ID.
DbInstanceId
This property is required.
Changes to this property will trigger replacement.
string
The instance ID.
JobName
This property is required.
Changes to this property will trigger replacement.
string
The name of the job.
Account string
The name of the database account.
Consistency string

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
DestColumns []string
Target Field
DestDatabase string
The name of the destination database.
DestSchema string
Target Schema
DestTable string
The name of the destination table.
ErrorLimitCount int
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
FallbackOffset string
Automatic offset reset
GroupName string
Group Name
JobConfig string
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
JobDescription string
The description of the job.
MatchColumns []string
Match Field
Mode Changes to this property will trigger replacement. string

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

Password string
The password of the database account.
SrcColumns []string
Source Field
TryRun bool

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

UpdateColumns []string
Update Field
WriteMode string

The write mode.

Valid values:

  • insert
  • update
  • merge
dataSourceId
This property is required.
Changes to this property will trigger replacement.
String
The data source ID.
dbInstanceId
This property is required.
Changes to this property will trigger replacement.
String
The instance ID.
jobName
This property is required.
Changes to this property will trigger replacement.
String
The name of the job.
account String
The name of the database account.
consistency String

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
destColumns List<String>
Target Field
destDatabase String
The name of the destination database.
destSchema String
Target Schema
destTable String
The name of the destination table.
errorLimitCount Integer
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallbackOffset String
Automatic offset reset
groupName String
Group Name
jobConfig String
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
jobDescription String
The description of the job.
matchColumns List<String>
Match Field
mode Changes to this property will trigger replacement. String

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password String
The password of the database account.
srcColumns List<String>
Source Field
tryRun Boolean

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

updateColumns List<String>
Update Field
writeMode String

The write mode.

Valid values:

  • insert
  • update
  • merge
dataSourceId
This property is required.
Changes to this property will trigger replacement.
string
The data source ID.
dbInstanceId
This property is required.
Changes to this property will trigger replacement.
string
The instance ID.
jobName
This property is required.
Changes to this property will trigger replacement.
string
The name of the job.
account string
The name of the database account.
consistency string

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
destColumns string[]
Target Field
destDatabase string
The name of the destination database.
destSchema string
Target Schema
destTable string
The name of the destination table.
errorLimitCount number
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallbackOffset string
Automatic offset reset
groupName string
Group Name
jobConfig string
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
jobDescription string
The description of the job.
matchColumns string[]
Match Field
mode Changes to this property will trigger replacement. string

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password string
The password of the database account.
srcColumns string[]
Source Field
tryRun boolean

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

updateColumns string[]
Update Field
writeMode string

The write mode.

Valid values:

  • insert
  • update
  • merge
data_source_id
This property is required.
Changes to this property will trigger replacement.
str
The data source ID.
db_instance_id
This property is required.
Changes to this property will trigger replacement.
str
The instance ID.
job_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the job.
account str
The name of the database account.
consistency str

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
dest_columns Sequence[str]
Target Field
dest_database str
The name of the destination database.
dest_schema str
Target Schema
dest_table str
The name of the destination table.
error_limit_count int
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallback_offset str
Automatic offset reset
group_name str
Group Name
job_config str
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
job_description str
The description of the job.
match_columns Sequence[str]
Match Field
mode Changes to this property will trigger replacement. str

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password str
The password of the database account.
src_columns Sequence[str]
Source Field
try_run bool

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

update_columns Sequence[str]
Update Field
write_mode str

The write mode.

Valid values:

  • insert
  • update
  • merge
dataSourceId
This property is required.
Changes to this property will trigger replacement.
String
The data source ID.
dbInstanceId
This property is required.
Changes to this property will trigger replacement.
String
The instance ID.
jobName
This property is required.
Changes to this property will trigger replacement.
String
The name of the job.
account String
The name of the database account.
consistency String

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
destColumns List<String>
Target Field
destDatabase String
The name of the destination database.
destSchema String
Target Schema
destTable String
The name of the destination table.
errorLimitCount Number
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallbackOffset String
Automatic offset reset
groupName String
Group Name
jobConfig String
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
jobDescription String
The description of the job.
matchColumns List<String>
Match Field
mode Changes to this property will trigger replacement. String

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password String
The password of the database account.
srcColumns List<String>
Source Field
tryRun Boolean

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

updateColumns List<String>
Update Field
writeMode String

The write mode.

Valid values:

  • insert
  • update
  • merge

Outputs

All input properties are implicitly available as output properties. Additionally, the StreamingJob resource produces the following output properties:

CreateTime string
The creation time of the resource
Id string
The provider-assigned unique ID for this managed resource.
JobId string
The job ID.
Status string
The status of the job.
CreateTime string
The creation time of the resource
Id string
The provider-assigned unique ID for this managed resource.
JobId string
The job ID.
Status string
The status of the job.
createTime String
The creation time of the resource
id String
The provider-assigned unique ID for this managed resource.
jobId String
The job ID.
status String
The status of the job.
createTime string
The creation time of the resource
id string
The provider-assigned unique ID for this managed resource.
jobId string
The job ID.
status string
The status of the job.
create_time str
The creation time of the resource
id str
The provider-assigned unique ID for this managed resource.
job_id str
The job ID.
status str
The status of the job.
createTime String
The creation time of the resource
id String
The provider-assigned unique ID for this managed resource.
jobId String
The job ID.
status String
The status of the job.
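
In TypeScript, these outputs can be read from the resource instance and exported from the program. A minimal sketch, assuming the streamingJobResource instance from the constructor example above:

export const streamingJobId = streamingJobResource.jobId;
export const streamingJobStatus = streamingJobResource.status;
export const streamingJobCreateTime = streamingJobResource.createTime;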

Look up Existing StreamingJob Resource

Get an existing StreamingJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: StreamingJobState, opts?: CustomResourceOptions): StreamingJob
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        account: Optional[str] = None,
        consistency: Optional[str] = None,
        create_time: Optional[str] = None,
        data_source_id: Optional[str] = None,
        db_instance_id: Optional[str] = None,
        dest_columns: Optional[Sequence[str]] = None,
        dest_database: Optional[str] = None,
        dest_schema: Optional[str] = None,
        dest_table: Optional[str] = None,
        error_limit_count: Optional[int] = None,
        fallback_offset: Optional[str] = None,
        group_name: Optional[str] = None,
        job_config: Optional[str] = None,
        job_description: Optional[str] = None,
        job_id: Optional[str] = None,
        job_name: Optional[str] = None,
        match_columns: Optional[Sequence[str]] = None,
        mode: Optional[str] = None,
        password: Optional[str] = None,
        src_columns: Optional[Sequence[str]] = None,
        status: Optional[str] = None,
        try_run: Optional[bool] = None,
        update_columns: Optional[Sequence[str]] = None,
        write_mode: Optional[str] = None) -> StreamingJob
func GetStreamingJob(ctx *Context, name string, id IDInput, state *StreamingJobState, opts ...ResourceOption) (*StreamingJob, error)
public static StreamingJob Get(string name, Input<string> id, StreamingJobState? state, CustomResourceOptions? opts = null)
public static StreamingJob get(String name, Output<String> id, StreamingJobState state, CustomResourceOptions options)
resources:
  _:
    type: alicloud:gpdb:StreamingJob
    get:
      id: ${id}
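
For example, in TypeScript (a minimal sketch; the resource name and ID are placeholders, and the ID follows the <db_instance_id>:<job_id> format described under Import below):

import * as alicloud from "@pulumi/alicloud";

// Look up an existing streaming job by its provider ID (placeholder values).
const existingJob = alicloud.gpdb.StreamingJob.get("existing", "gp-example12345:job-001");

export const existingJobStatus = existingJob.status;
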
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Account string
The name of the database account.
Consistency string

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
CreateTime string
The creation time of the resource
DataSourceId Changes to this property will trigger replacement. string
The data source ID.
DbInstanceId Changes to this property will trigger replacement. string
The instance ID.
DestColumns List<string>
Target Field
DestDatabase string
The name of the destination database.
DestSchema string
Target Schema
DestTable string
The name of the destination table.
ErrorLimitCount int
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
FallbackOffset string
Automatic offset reset
GroupName string
Group Name
JobConfig string
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
JobDescription string
The description of the job.
JobId string
The job ID.
JobName Changes to this property will trigger replacement. string
The name of the job.
MatchColumns List<string>
Match Field
Mode Changes to this property will trigger replacement. string

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

Password string
The password of the database account.
SrcColumns List<string>
Source Field
Status string
The status of the job.
TryRun bool

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

UpdateColumns List<string>
Update Field
WriteMode string

The write mode.

Valid values:

  • insert
  • update
  • merge
Account string
The name of the database account.
Consistency string

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
CreateTime string
The creation time of the resource
DataSourceId Changes to this property will trigger replacement. string
The data source ID.
DbInstanceId Changes to this property will trigger replacement. string
The instance ID.
DestColumns []string
Target Field
DestDatabase string
The name of the destination database.
DestSchema string
Target Schema
DestTable string
The name of the destination table.
ErrorLimitCount int
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
FallbackOffset string
Automatic offset reset
GroupName string
Group Name
JobConfig string
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
JobDescription string
The description of the job.
JobId string
The job ID.
JobName Changes to this property will trigger replacement. string
The name of the job.
MatchColumns []string
Match Field
Mode Changes to this property will trigger replacement. string

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

Password string
The password of the database account.
SrcColumns []string
Source Field
Status string
The status of the job.
TryRun bool

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

UpdateColumns []string
Update Field
WriteMode string

The write mode.

Valid values:

  • insert
  • update
  • merge
account String
The name of the database account.
consistency String

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
createTime String
The creation time of the resource
dataSourceId Changes to this property will trigger replacement. String
The data source ID.
dbInstanceId Changes to this property will trigger replacement. String
The instance ID.
destColumns List<String>
Target Field
destDatabase String
The name of the destination database.
destSchema String
Target Schema
destTable String
The name of the destination table.
errorLimitCount Integer
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallbackOffset String
Automatic offset reset
groupName String
Group Name
jobConfig String
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
jobDescription String
The description of the job.
jobId String
The job ID.
jobName Changes to this property will trigger replacement. String
The name of the job.
matchColumns List<String>
Match Field
mode Changes to this property will trigger replacement. String

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password String
The password of the database account.
srcColumns List<String>
Source Field
status String
The status of the job.
tryRun Boolean

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

updateColumns List<String>
Update Field
writeMode String

The write mode.

Valid values:

  • insert
  • update
  • merge
account string
The name of the database account.
consistency string

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
createTime string
The creation time of the resource
dataSourceId Changes to this property will trigger replacement. string
The data source ID.
dbInstanceId Changes to this property will trigger replacement. string
The instance ID.
destColumns string[]
Target Field
destDatabase string
The name of the destination database.
destSchema string
Target Schema
destTable string
The name of the destination table.
errorLimitCount number
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallbackOffset string
Automatic offset reset
groupName string
Group Name
jobConfig string
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
jobDescription string
The description of the job.
jobId string
The job ID.
jobName Changes to this property will trigger replacement. string
The name of the job.
matchColumns string[]
Match Field
mode Changes to this property will trigger replacement. string

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password string
The password of the database account.
srcColumns string[]
Source Field
status string
The status of the job.
tryRun boolean

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

updateColumns string[]
Update Field
writeMode string

The write mode.

Valid values:

  • insert
  • update
  • merge
account str
The name of the database account.
consistency str

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
create_time str
The creation time of the resource
data_source_id Changes to this property will trigger replacement. str
The data source ID.
db_instance_id Changes to this property will trigger replacement. str
The instance ID.
dest_columns Sequence[str]
Target Field
dest_database str
The name of the destination database.
dest_schema str
Target Schema
dest_table str
The name of the destination table.
error_limit_count int
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallback_offset str
Automatic offset reset
group_name str
Group Name
job_config str
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
job_description str
The description of the job.
job_id str
The job ID.
job_name Changes to this property will trigger replacement. str
The name of the job.
match_columns Sequence[str]
Match Field
mode Changes to this property will trigger replacement. str

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password str
The password of the database account.
src_columns Sequence[str]
Source Field
status str
The status of the job.
try_run bool

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

update_columns Sequence[str]
Update Field
write_mode str

The write mode.

Valid values:

  • insert
  • update
  • merge
account String
The name of the database account.
consistency String

The delivery guarantee setting.

Valid values:

  • ATLEAST
  • EXACTLY
createTime String
The creation time of the resource
dataSourceId Changes to this property will trigger replacement. String
The data source ID.
dbInstanceId Changes to this property will trigger replacement. String
The instance ID.
destColumns List<String>
Target Field
destDatabase String
The name of the destination database.
destSchema String
Target Schema
destTable String
The name of the destination table.
errorLimitCount Number
The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
fallbackOffset String
Automatic offset reset
groupName String
Group Name
jobConfig String
The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
jobDescription String
The description of the job.
jobId String
The job ID.
jobName Changes to this property will trigger replacement. String
The name of the job.
matchColumns List<String>
Match Field
mode Changes to this property will trigger replacement. String

The configuration mode. Valid values:

  1. basic: In basic mode, you must configure the configuration parameters.

  2. professional: In professional mode, you can submit a YAML configuration file.

password String
The password of the database account.
srcColumns List<String>
Source Field
status String
The status of the job.
tryRun Boolean

Specifies whether to test the real-time job. Valid values:

  • true
  • false

Default value: false.

updateColumns List<String>
Update Field
writeMode String

The write mode.

Valid values:

  • insert
  • update
  • merge

Import

GPDB Streaming Job can be imported using the id, e.g.

$ pulumi import alicloud:gpdb/streamingJob:StreamingJob example <db_instance_id>:<job_id>

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Alibaba Cloud pulumi/pulumi-alicloud
License
Apache-2.0
Notes
This Pulumi package is based on the alicloud Terraform Provider.