flexibleengine 1.46.0 published on Monday, Apr 14, 2025 by flexibleenginecloud

flexibleengine.DliSqlJob


Manages a DLI SQL job resource within FlexibleEngine.

Example Usage

Create a SQL job

import * as pulumi from "@pulumi/pulumi";
import * as flexibleengine from "@pulumi/flexibleengine";

const config = new pulumi.Config();
const databaseName = config.requireObject("databaseName");
const queueName = config.requireObject("queueName");
const sql = config.requireObject("sql");
const test = new flexibleengine.DliSqlJob("test", {
    sql: sql,
    databaseName: databaseName,
    queueName: queueName,
});
import pulumi
import pulumi_flexibleengine as flexibleengine

config = pulumi.Config()
database_name = config.require_object("databaseName")
queue_name = config.require_object("queueName")
sql = config.require_object("sql")
test = flexibleengine.DliSqlJob("test",
    sql=sql,
    database_name=database_name,
    queue_name=queue_name)
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/flexibleengine/flexibleengine"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		cfg := config.New(ctx, "")
		databaseName := cfg.RequireObject("databaseName")
		queueName := cfg.RequireObject("queueName")
		sql := cfg.RequireObject("sql")
		_, err := flexibleengine.NewDliSqlJob(ctx, "test", &flexibleengine.DliSqlJobArgs{
			Sql:          pulumi.Any(sql),
			DatabaseName: pulumi.Any(databaseName),
			QueueName:    pulumi.Any(queueName),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Flexibleengine = Pulumi.Flexibleengine;

return await Deployment.RunAsync(() => 
{
    var config = new Config();
    var databaseName = config.RequireObject<dynamic>("databaseName");
    var queueName = config.RequireObject<dynamic>("queueName");
    var sql = config.RequireObject<dynamic>("sql");
    var test = new Flexibleengine.DliSqlJob("test", new()
    {
        Sql = sql,
        DatabaseName = databaseName,
        QueueName = queueName,
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.flexibleengine.DliSqlJob;
import com.pulumi.flexibleengine.DliSqlJobArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var config = ctx.config();
        final var databaseName = config.get("databaseName");
        final var queueName = config.get("queueName");
        final var sql = config.get("sql");
        var test = new DliSqlJob("test", DliSqlJobArgs.builder()
            .sql(sql)
            .databaseName(databaseName)
            .queueName(queueName)
            .build());

    }
}
configuration:
  databaseName:
    type: dynamic
  queueName:
    type: dynamic
  sql:
    type: dynamic
resources:
  test:
    type: flexibleengine:DliSqlJob
    properties:
      sql: ${sql}
      databaseName: ${databaseName}
      queueName: ${queueName}
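The examples above read databaseName, queueName, and sql from stack configuration. As a minimal sketch, those values could be set like this before running pulumi up (the database, queue, and statement shown are placeholders):

$ pulumi config set databaseName terraform_db
$ pulumi config set queueName default
$ pulumi config set sql "SELECT 1"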

Create DliSqlJob Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new DliSqlJob(name: string, args: DliSqlJobArgs, opts?: CustomResourceOptions);
@overload
def DliSqlJob(resource_name: str,
              args: DliSqlJobArgs,
              opts: Optional[ResourceOptions] = None)

@overload
def DliSqlJob(resource_name: str,
              opts: Optional[ResourceOptions] = None,
              sql: Optional[str] = None,
              conf: Optional[DliSqlJobConfArgs] = None,
              database_name: Optional[str] = None,
              dli_sql_job_id: Optional[str] = None,
              queue_name: Optional[str] = None,
              region: Optional[str] = None,
              tags: Optional[Mapping[str, str]] = None,
              timeouts: Optional[DliSqlJobTimeoutsArgs] = None)
func NewDliSqlJob(ctx *Context, name string, args DliSqlJobArgs, opts ...ResourceOption) (*DliSqlJob, error)
public DliSqlJob(string name, DliSqlJobArgs args, CustomResourceOptions? opts = null)
public DliSqlJob(String name, DliSqlJobArgs args)
public DliSqlJob(String name, DliSqlJobArgs args, CustomResourceOptions options)
type: flexibleengine:DliSqlJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. DliSqlJobArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. DliSqlJobArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. DliSqlJobArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. DliSqlJobArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. DliSqlJobArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var dliSqlJobResource = new Flexibleengine.DliSqlJob("dliSqlJobResource", new()
{
    Sql = "string",
    Conf = new Flexibleengine.Inputs.DliSqlJobConfArgs
    {
        DliSqlJobTimeout = 0,
        DliSqlSqlasyncEnabled = false,
        SparkSqlAutoBroadcastJoinThreshold = 0,
        SparkSqlBadRecordsPath = "string",
        SparkSqlDynamicPartitionOverwriteEnabled = false,
        SparkSqlFilesMaxPartitionBytes = 0,
        SparkSqlMaxRecordsPerFile = 0,
        SparkSqlShufflePartitions = 0,
    },
    DatabaseName = "string",
    DliSqlJobId = "string",
    QueueName = "string",
    Region = "string",
    Tags = 
    {
        { "string", "string" },
    },
    Timeouts = new Flexibleengine.Inputs.DliSqlJobTimeoutsArgs
    {
        Create = "string",
        Delete = "string",
    },
});
example, err := flexibleengine.NewDliSqlJob(ctx, "dliSqlJobResource", &flexibleengine.DliSqlJobArgs{
	Sql: pulumi.String("string"),
	Conf: &flexibleengine.DliSqlJobConfArgs{
		DliSqlJobTimeout:                         pulumi.Float64(0),
		DliSqlSqlasyncEnabled:                    pulumi.Bool(false),
		SparkSqlAutoBroadcastJoinThreshold:       pulumi.Float64(0),
		SparkSqlBadRecordsPath:                   pulumi.String("string"),
		SparkSqlDynamicPartitionOverwriteEnabled: pulumi.Bool(false),
		SparkSqlFilesMaxPartitionBytes:           pulumi.Float64(0),
		SparkSqlMaxRecordsPerFile:                pulumi.Float64(0),
		SparkSqlShufflePartitions:                pulumi.Float64(0),
	},
	DatabaseName: pulumi.String("string"),
	DliSqlJobId:  pulumi.String("string"),
	QueueName:    pulumi.String("string"),
	Region:       pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Timeouts: &flexibleengine.DliSqlJobTimeoutsArgs{
		Create: pulumi.String("string"),
		Delete: pulumi.String("string"),
	},
})
var dliSqlJobResource = new DliSqlJob("dliSqlJobResource", DliSqlJobArgs.builder()
    .sql("string")
    .conf(DliSqlJobConfArgs.builder()
        .dliSqlJobTimeout(0)
        .dliSqlSqlasyncEnabled(false)
        .sparkSqlAutoBroadcastJoinThreshold(0)
        .sparkSqlBadRecordsPath("string")
        .sparkSqlDynamicPartitionOverwriteEnabled(false)
        .sparkSqlFilesMaxPartitionBytes(0)
        .sparkSqlMaxRecordsPerFile(0)
        .sparkSqlShufflePartitions(0)
        .build())
    .databaseName("string")
    .dliSqlJobId("string")
    .queueName("string")
    .region("string")
    .tags(Map.of("string", "string"))
    .timeouts(DliSqlJobTimeoutsArgs.builder()
        .create("string")
        .delete("string")
        .build())
    .build());
dli_sql_job_resource = flexibleengine.DliSqlJob("dliSqlJobResource",
    sql="string",
    conf={
        "dli_sql_job_timeout": 0,
        "dli_sql_sqlasync_enabled": False,
        "spark_sql_auto_broadcast_join_threshold": 0,
        "spark_sql_bad_records_path": "string",
        "spark_sql_dynamic_partition_overwrite_enabled": False,
        "spark_sql_files_max_partition_bytes": 0,
        "spark_sql_max_records_per_file": 0,
        "spark_sql_shuffle_partitions": 0,
    },
    database_name="string",
    dli_sql_job_id="string",
    queue_name="string",
    region="string",
    tags={
        "string": "string",
    },
    timeouts={
        "create": "string",
        "delete": "string",
    })
const dliSqlJobResource = new flexibleengine.DliSqlJob("dliSqlJobResource", {
    sql: "string",
    conf: {
        dliSqlJobTimeout: 0,
        dliSqlSqlasyncEnabled: false,
        sparkSqlAutoBroadcastJoinThreshold: 0,
        sparkSqlBadRecordsPath: "string",
        sparkSqlDynamicPartitionOverwriteEnabled: false,
        sparkSqlFilesMaxPartitionBytes: 0,
        sparkSqlMaxRecordsPerFile: 0,
        sparkSqlShufflePartitions: 0,
    },
    databaseName: "string",
    dliSqlJobId: "string",
    queueName: "string",
    region: "string",
    tags: {
        string: "string",
    },
    timeouts: {
        create: "string",
        "delete": "string",
    },
});
type: flexibleengine:DliSqlJob
properties:
    conf:
        dliSqlJobTimeout: 0
        dliSqlSqlasyncEnabled: false
        sparkSqlAutoBroadcastJoinThreshold: 0
        sparkSqlBadRecordsPath: string
        sparkSqlDynamicPartitionOverwriteEnabled: false
        sparkSqlFilesMaxPartitionBytes: 0
        sparkSqlMaxRecordsPerFile: 0
        sparkSqlShufflePartitions: 0
    databaseName: string
    dliSqlJobId: string
    queueName: string
    region: string
    sql: string
    tags:
        string: string
    timeouts:
        create: string
        delete: string

DliSqlJob Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The DliSqlJob resource accepts the following input properties:

Sql This property is required. string
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
Conf DliSqlJobConf

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

DatabaseName string
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
DliSqlJobId string
Indicates the resource ID, in UUID format.
QueueName string
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
Region string
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
Tags Dictionary<string, string>
Specifies the labels of the job. Changing this parameter will create a new resource.
Timeouts DliSqlJobTimeouts
Sql This property is required. string
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
Conf DliSqlJobConfArgs

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

DatabaseName string
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
DliSqlJobId string
Indicates the resource ID, in UUID format.
QueueName string
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
Region string
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
Tags map[string]string
Specifies the labels of the job. Changing this parameter will create a new resource.
Timeouts DliSqlJobTimeoutsArgs
sql This property is required. String
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
conf DliSqlJobConf

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

databaseName String
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dliSqlJobId String
Indicates the resource ID, in UUID format.
queueName String
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region String
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
tags Map<String,String>
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts DliSqlJobTimeouts
sql This property is required. string
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
conf DliSqlJobConf

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

databaseName string
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dliSqlJobId string
Indicates the resource ID, in UUID format.
queueName string
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region string
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
tags {[key: string]: string}
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts DliSqlJobTimeouts
sql This property is required. str
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
conf DliSqlJobConfArgs

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

database_name str
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dli_sql_job_id str
Indicates the resource ID, in UUID format.
queue_name str
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region str
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
tags Mapping[str, str]
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts DliSqlJobTimeoutsArgs
sql This property is required. String
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
conf Property Map

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

databaseName String
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dliSqlJobId String
Indicates the resource ID, in UUID format.
queueName String
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region String
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
tags Map<String>
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts Property Map

Outputs

All input properties are implicitly available as output properties. Additionally, the DliSqlJob resource produces the following output properties:

Duration double
The job running duration, in milliseconds.
Id string
The provider-assigned unique ID for this managed resource.
JobType string
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
Owner string
The user who submitted the job.
Rows List<ImmutableArray<string>>
When the statement type is DDL, the results of the DDL statement are displayed.
Schemas List<ImmutableDictionary<string, string>>
When the statement type is DDL, the column names and types of the DDL results are displayed.
StartTime string
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
Status string
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
Duration float64
The job running duration, in milliseconds.
Id string
The provider-assigned unique ID for this managed resource.
JobType string
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
Owner string
The user who submitted the job.
Rows [][]string
When the statement type is DDL, the results of the DDL statement are displayed.
Schemas []map[string]string
When the statement type is DDL, the column names and types of the DDL results are displayed.
StartTime string
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
Status string
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
duration Double
The job running duration, in milliseconds.
id String
The provider-assigned unique ID for this managed resource.
jobType String
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner String
The user who submitted the job.
rows List<List<String>>
When the statement type is DDL, the results of the DDL statement are displayed.
schemas List<Map<String,String>>
When the statement type is DDL, the column names and types of the DDL results are displayed.
startTime String
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status String
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
duration number
The job running duration, in milliseconds.
id string
The provider-assigned unique ID for this managed resource.
jobType string
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner string
The user who submitted the job.
rows string[][]
When the statement type is DDL, the results of the DDL statement are displayed.
schemas {[key: string]: string}[]
When the statement type is DDL, the column names and types of the DDL results are displayed.
startTime string
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status string
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
duration float
The job running duration, in milliseconds.
id str
The provider-assigned unique ID for this managed resource.
job_type str
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner str
The user who submitted the job.
rows Sequence[Sequence[str]]
When the statement type is DDL, the results of the DDL statement are displayed.
schemas Sequence[Mapping[str, str]]
When the statement type is DDL, the column names and types of the DDL results are displayed.
start_time str
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status str
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
duration Number
The job running duration, in milliseconds.
id String
The provider-assigned unique ID for this managed resource.
jobType String
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner String
The user who submitted the job.
rows List<List<String>>
When the statement type is DDL, the results of the DDL statement are displayed.
schemas List<Map<String>>
When the statement type is DDL, the column names and types of the DDL results are displayed.
startTime String
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status String
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
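
As an illustrative TypeScript sketch, these computed properties can be exported like any other outputs (the statement, database, and queue names below are placeholders):

import * as flexibleengine from "@pulumi/flexibleengine";

const job = new flexibleengine.DliSqlJob("job", {
    sql: "SHOW TABLES",           // placeholder statement
    databaseName: "terraform_db", // placeholder database
    queueName: "default",         // placeholder queue
});

// Computed outputs become available once the job has been submitted.
export const status = job.status;     // e.g. RUNNING or FINISHED
export const duration = job.duration; // running duration in milliseconds
export const owner = job.owner;       // user who submitted the job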

Look up Existing DliSqlJob Resource

Get an existing DliSqlJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DliSqlJobState, opts?: CustomResourceOptions): DliSqlJob
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        conf: Optional[DliSqlJobConfArgs] = None,
        database_name: Optional[str] = None,
        dli_sql_job_id: Optional[str] = None,
        duration: Optional[float] = None,
        job_type: Optional[str] = None,
        owner: Optional[str] = None,
        queue_name: Optional[str] = None,
        region: Optional[str] = None,
        rows: Optional[Sequence[Sequence[str]]] = None,
        schemas: Optional[Sequence[Mapping[str, str]]] = None,
        sql: Optional[str] = None,
        start_time: Optional[str] = None,
        status: Optional[str] = None,
        tags: Optional[Mapping[str, str]] = None,
        timeouts: Optional[DliSqlJobTimeoutsArgs] = None) -> DliSqlJob
func GetDliSqlJob(ctx *Context, name string, id IDInput, state *DliSqlJobState, opts ...ResourceOption) (*DliSqlJob, error)
public static DliSqlJob Get(string name, Input<string> id, DliSqlJobState? state, CustomResourceOptions? opts = null)
public static DliSqlJob get(String name, Output<String> id, DliSqlJobState state, CustomResourceOptions options)
resources:
  _:
    type: flexibleengine:DliSqlJob
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
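
For example, a minimal TypeScript sketch of looking up a job by its ID (the UUID below is a placeholder):

import * as flexibleengine from "@pulumi/flexibleengine";

// Look up an existing DLI SQL job by its provider-assigned ID.
const existing = flexibleengine.DliSqlJob.get("existing", "7f803d70-c533-469f-8431-e378f3e97123");

export const existingStatus = existing.status;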
The following state arguments are supported:
Conf DliSqlJobConf

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

DatabaseName string
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
DliSqlJobId string
Indicates the resource ID, in UUID format.
Duration double
The job running duration, in milliseconds.
JobType string
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
Owner string
The user who submitted the job.
QueueName string
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
Region string
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
Rows List<ImmutableArray<string>>
When the statement type is DDL, the results of the DDL statement are displayed.
Schemas List<ImmutableDictionary<string, string>>
When the statement type is DDL, the column names and types of the DDL results are displayed.
Sql string
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
StartTime string
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
Status string
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
Tags Dictionary<string, string>
Specifies the labels of the job. Changing this parameter will create a new resource.
Timeouts DliSqlJobTimeouts
Conf DliSqlJobConfArgs

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

DatabaseName string
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
DliSqlJobId string
Indicates the resource ID, in UUID format.
Duration float64
The job running duration, in milliseconds.
JobType string
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
Owner string
The user who submitted the job.
QueueName string
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
Region string
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
Rows [][]string
When the statement type is DDL, the results of the DDL statement are displayed.
Schemas []map[string]string
When the statement type is DDL, the column names and types of the DDL results are displayed.
Sql string
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
StartTime string
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
Status string
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
Tags map[string]string
Specifies the labels of the job. Changing this parameter will create a new resource.
Timeouts DliSqlJobTimeoutsArgs
conf DliSqlJobConf

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

databaseName String
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dliSqlJobId String
Indicates the resource ID, in UUID format.
duration Double
The job running duration, in milliseconds.
jobType String
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner String
The user who submitted the job.
queueName String
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region String
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
rows List<List<String>>
When the statement type is DDL, the results of the DDL statement are displayed.
schemas List<Map<String,String>>
When the statement type is DDL, the column names and types of the DDL results are displayed.
sql String
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
startTime String
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status String
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
tags Map<String,String>
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts DliSqlJobTimeouts
conf DliSqlJobConf

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

databaseName string
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dliSqlJobId string
Indicates the resource ID, in UUID format.
duration number
The job running duration, in milliseconds.
jobType string
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner string
The user who submitted the job.
queueName string
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region string
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
rows string[][]
When the statement type is DDL, the results of the DDL statement are displayed.
schemas {[key: string]: string}[]
When the statement type is DDL, the column names and types of the DDL results are displayed.
sql string
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
startTime string
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status string
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
tags {[key: string]: string}
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts DliSqlJobTimeouts
conf DliSqlJobConfArgs

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

database_name str
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dli_sql_job_id str
Indicates the resource ID, in UUID format.
duration float
The job running duration, in milliseconds.
job_type str
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner str
The user who submitted the job.
queue_name str
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region str
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
rows Sequence[Sequence[str]]
When the statement type is DDL, the results of the DDL statement are displayed.
schemas Sequence[Mapping[str, str]]
When the statement type is DDL, the column names and types of the DDL results are displayed.
sql str
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
start_time str
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status str
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
tags Mapping[str, str]
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts DliSqlJobTimeoutsArgs
conf Property Map

Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.

The conf block supports:

databaseName String
Specifies the database where the SQL statement is executed. This argument is not required when the SQL statement creates a database. Changing this parameter will create a new resource.
dliSqlJobId String
Indicates the resource ID, in UUID format.
duration Number
The job running duration, in milliseconds.
jobType String
The job type, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
owner String
The user who submitted the job.
queueName String
Specifies the queue to which this job is submitted. Changing this parameter will create a new resource.
region String
Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
rows List<List<String>>
When the statement type is DDL, the results of the DDL statement are displayed.
schemas List<Map<String>>
When the statement type is DDL, the column names and types of the DDL results are displayed.
sql String
Specifies the SQL statement that you want to execute. Changing this parameter will create a new resource.
startTime String
The time when the job started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
status String
The job status, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
tags Map<String>
Specifies the labels of the job. Changing this parameter will create a new resource.
timeouts Property Map

Supporting Types

DliSqlJobConf, DliSqlJobConfArgs

DliSqlJobTimeout double
Sets the job running timeout interval, in milliseconds. If the timeout expires, the job is canceled. Changing this parameter will create a new resource.
DliSqlSqlasyncEnabled bool
Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
SparkSqlAutoBroadcastJoinThreshold double
The maximum size, in bytes, of a table that is broadcast to all worker nodes when a join is executed. Set this parameter to -1 to disable broadcasting. Default value is 209715200. Currently, statistics are only supported for Hive Metastore tables where the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and for file-based data source tables where statistics are computed directly on the data files. Changing this parameter will create a new resource.
SparkSqlBadRecordsPath string
The path of bad records. Changing this parameter will create a new resource.
SparkSqlDynamicPartitionOverwriteEnabled bool
In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions that receive data during execution. Default value is false. Changing this parameter will create a new resource.
SparkSqlFilesMaxPartitionBytes double
The maximum number of bytes to pack into a single partition when reading files. Default value is 134217728. Changing this parameter will create a new resource.
SparkSqlMaxRecordsPerFile double
The maximum number of records to write into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
SparkSqlShufflePartitions double
The default number of partitions to use when shuffling data for joins or aggregations. Default value is 4096. Changing this parameter will create a new resource.
DliSqlJobTimeout float64
Sets the job running timeout interval, in milliseconds. If the timeout expires, the job is canceled. Changing this parameter will create a new resource.
DliSqlSqlasyncEnabled bool
Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
SparkSqlAutoBroadcastJoinThreshold float64
The maximum size, in bytes, of a table that is broadcast to all worker nodes when a join is executed. Set this parameter to -1 to disable broadcasting. Default value is 209715200. Currently, statistics are only supported for Hive Metastore tables where the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and for file-based data source tables where statistics are computed directly on the data files. Changing this parameter will create a new resource.
SparkSqlBadRecordsPath string
The path of bad records. Changing this parameter will create a new resource.
SparkSqlDynamicPartitionOverwriteEnabled bool
In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions that receive data during execution. Default value is false. Changing this parameter will create a new resource.
SparkSqlFilesMaxPartitionBytes float64
The maximum number of bytes to pack into a single partition when reading files. Default value is 134217728. Changing this parameter will create a new resource.
SparkSqlMaxRecordsPerFile float64
The maximum number of records to write into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
SparkSqlShufflePartitions float64
The default number of partitions to use when shuffling data for joins or aggregations. Default value is 4096. Changing this parameter will create a new resource.
dliSqlJobTimeout Double
Sets the job running timeout interval, in milliseconds. If the timeout expires, the job is canceled. Changing this parameter will create a new resource.
dliSqlSqlasyncEnabled Boolean
Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
sparkSqlAutoBroadcastJoinThreshold Double
The maximum size, in bytes, of a table that is broadcast to all worker nodes when a join is executed. Set this parameter to -1 to disable broadcasting. Default value is 209715200. Currently, statistics are only supported for Hive Metastore tables where the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and for file-based data source tables where statistics are computed directly on the data files. Changing this parameter will create a new resource.
sparkSqlBadRecordsPath String
The path of bad records. Changing this parameter will create a new resource.
sparkSqlDynamicPartitionOverwriteEnabled Boolean
In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions that receive data during execution. Default value is false. Changing this parameter will create a new resource.
sparkSqlFilesMaxPartitionBytes Double
The maximum number of bytes to pack into a single partition when reading files. Default value is 134217728. Changing this parameter will create a new resource.
sparkSqlMaxRecordsPerFile Double
The maximum number of records to write into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
sparkSqlShufflePartitions Double
The default number of partitions to use when shuffling data for joins or aggregations. Default value is 4096. Changing this parameter will create a new resource.
dliSqlJobTimeout number
Sets the job running timeout interval, in milliseconds. If the timeout expires, the job is canceled. Changing this parameter will create a new resource.
dliSqlSqlasyncEnabled boolean
Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
sparkSqlAutoBroadcastJoinThreshold number
The maximum size, in bytes, of a table that is broadcast to all worker nodes when a join is executed. Set this parameter to -1 to disable broadcasting. Default value is 209715200. Currently, statistics are only supported for Hive Metastore tables where the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and for file-based data source tables where statistics are computed directly on the data files. Changing this parameter will create a new resource.
sparkSqlBadRecordsPath string
The path of bad records. Changing this parameter will create a new resource.
sparkSqlDynamicPartitionOverwriteEnabled boolean
In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions that receive data during execution. Default value is false. Changing this parameter will create a new resource.
sparkSqlFilesMaxPartitionBytes number
The maximum number of bytes to pack into a single partition when reading files. Default value is 134217728. Changing this parameter will create a new resource.
sparkSqlMaxRecordsPerFile number
The maximum number of records to write into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
sparkSqlShufflePartitions number
The default number of partitions to use when shuffling data for joins or aggregations. Default value is 4096. Changing this parameter will create a new resource.
dli_sql_job_timeout float
Sets the job running timeout interval, in milliseconds. If the timeout expires, the job is canceled. Changing this parameter will create a new resource.
dli_sql_sqlasync_enabled bool
Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
spark_sql_auto_broadcast_join_threshold float
The maximum size, in bytes, of a table that is broadcast to all worker nodes when a join is executed. Set this parameter to -1 to disable broadcasting. Default value is 209715200. Currently, statistics are only supported for Hive Metastore tables where the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and for file-based data source tables where statistics are computed directly on the data files. Changing this parameter will create a new resource.
spark_sql_bad_records_path str
The path of bad records. Changing this parameter will create a new resource.
spark_sql_dynamic_partition_overwrite_enabled bool
In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions that receive data during execution. Default value is false. Changing this parameter will create a new resource.
spark_sql_files_max_partition_bytes float
The maximum number of bytes to pack into a single partition when reading files. Default value is 134217728. Changing this parameter will create a new resource.
spark_sql_max_records_per_file float
The maximum number of records to write into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
spark_sql_shuffle_partitions float
The default number of partitions to use when shuffling data for joins or aggregations. Default value is 4096. Changing this parameter will create a new resource.
dliSqlJobTimeout Number
Sets the job running timeout interval, in milliseconds. If the timeout expires, the job is canceled. Changing this parameter will create a new resource.
dliSqlSqlasyncEnabled Boolean
Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
sparkSqlAutoBroadcastJoinThreshold Number
The maximum size, in bytes, of a table that is broadcast to all worker nodes when a join is executed. Set this parameter to -1 to disable broadcasting. Default value is 209715200. Currently, statistics are only supported for Hive Metastore tables where the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and for file-based data source tables where statistics are computed directly on the data files. Changing this parameter will create a new resource.
sparkSqlBadRecordsPath String
The path of bad records. Changing this parameter will create a new resource.
sparkSqlDynamicPartitionOverwriteEnabled Boolean
In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions that receive data during execution. Default value is false. Changing this parameter will create a new resource.
sparkSqlFilesMaxPartitionBytes Number
The maximum number of bytes to pack into a single partition when reading files. Default value is 134217728. Changing this parameter will create a new resource.
sparkSqlMaxRecordsPerFile Number
The maximum number of records to write into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
sparkSqlShufflePartitions Number
The default number of partitions to use when shuffling data for joins or aggregations. Default value is 4096. Changing this parameter will create a new resource.
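
As a sketch of how these options fit together, a job that enables asynchronous execution and tunes a few Spark settings might look like this in TypeScript (all values are illustrative):

import * as flexibleengine from "@pulumi/flexibleengine";

const tunedJob = new flexibleengine.DliSqlJob("tunedJob", {
    sql: "CREATE TABLE IF NOT EXISTS t (id INT)", // placeholder DDL statement
    databaseName: "terraform_db",                 // placeholder database
    queueName: "default",                         // placeholder queue
    conf: {
        dliSqlSqlasyncEnabled: true,    // run DDL/DCL statements asynchronously
        dliSqlJobTimeout: 300000,       // cancel the job after 5 minutes (ms)
        sparkSqlShufflePartitions: 200, // illustrative shuffle tuning
    },
});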

DliSqlJobTimeouts, DliSqlJobTimeoutsArgs

Create string
Delete string
Create string
Delete string
create String
delete String
create string
delete string
create str
delete str
create String
delete String
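
The create and delete values are operation timeouts. Assuming they take the usual Terraform-style duration strings (e.g. 10m, 1h), a TypeScript sketch:

import * as flexibleengine from "@pulumi/flexibleengine";

const jobWithTimeouts = new flexibleengine.DliSqlJob("jobWithTimeouts", {
    sql: "SELECT 1",      // placeholder statement
    queueName: "default", // placeholder queue
    timeouts: {
        create: "20m", // assumed duration-string format
        delete: "10m",
    },
});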

Import

A DLI SQL job can be imported by its ID, e.g.

$ pulumi import flexibleengine:index/dliSqlJob:DliSqlJob example 7f803d70-c533-469f-8431-e378f3e97123

Note that the imported state may not be identical to your resource definition, due to some attributes missing from the API response, security or some other reason. The missing attributes include: conf, rows and schema. It is generally recommended to run pulumi preview after importing a resource. You can then decide whether changes should be applied to the resource, or the resource definition should be updated to align with the resource. Also, you can ignore changes as below.

resource "flexibleengine_dli_sql_job" "test" {
  ...

  lifecycle {
    ignore_changes = [
      conf, rows, schema,
    ]
  }
}
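
In a Pulumi program, the counterpart of the ignore_changes lifecycle block is the ignoreChanges resource option; a TypeScript sketch (the property names follow this page's schema, where the missing attributes surface as conf, rows, and schemas):

import * as flexibleengine from "@pulumi/flexibleengine";

const imported = new flexibleengine.DliSqlJob("example", {
    sql: "SELECT 1",      // match the imported job's statement
    queueName: "default", // match the imported job's queue
}, {
    // Suppress diffs on attributes the API does not return after import.
    ignoreChanges: ["conf", "rows", "schemas"],
});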

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
flexibleengine flexibleenginecloud/terraform-provider-flexibleengine
License
Notes
This Pulumi package is based on the flexibleengine Terraform Provider.