diff --git a/docs/resources/job_definition.md b/docs/resources/job_definition.md
index 2dbd5bdb..efc70385 100644
--- a/docs/resources/job_definition.md
+++ b/docs/resources/job_definition.md
@@ -747,6 +747,36 @@ resource "trocco_job_definition" "decoder_example" {
### InputOptions
+#### BigqueryInputOption
+
+```terraform
+resource "trocco_job_definition" "bigquery_input_example" {
+ input_option_type = "bigquery"
+ input_option = {
+ bigquery_input_option = {
+ bigquery_connection_id = 1
+ gcs_uri = "test_bucket"
+ gcs_uri_format = "bucket"
+ query = "SELECT * FROM `test_dataset.test_table`"
+ temp_dataset = "temp_dataset"
+ location = "asia-northeast1"
+ is_standard_sql = true
+ cleanup_gcs_files = true
+ file_format = "CSV"
+ cache = true
+ bigquery_job_wait_second = 600
+
+ columns = [
+ {
+ name = "col1__c"
+ type = "string"
+ }
+ ]
+ }
+ }
+}
+```
+
#### MysqlInputOption
```terraform
@@ -1146,12 +1176,76 @@ Optional:
Optional:
+- `bigquery_input_option` (Attributes) Attributes about source BigQuery (see [below for nested schema](#nestedatt--input_option--bigquery_input_option))
- `gcs_input_option` (Attributes) Attributes about source GCS (see [below for nested schema](#nestedatt--input_option--gcs_input_option))
- `google_spreadsheets_input_option` (Attributes) Attributes about source Google Spreadsheets (see [below for nested schema](#nestedatt--input_option--google_spreadsheets_input_option))
- `mysql_input_option` (Attributes) Attributes of source mysql (see [below for nested schema](#nestedatt--input_option--mysql_input_option))
- `salesforce_input_option` (Attributes) Attributes about source Salesforce (see [below for nested schema](#nestedatt--input_option--salesforce_input_option))
- `snowflake_input_option` (Attributes) Attributes about source snowflake (see [below for nested schema](#nestedatt--input_option--snowflake_input_option))
+
+### Nested Schema for `input_option.bigquery_input_option`
+
+Required:
+
+- `bigquery_connection_id` (Number) ID of the BigQuery connection
+- `columns` (Attributes List) List of columns to be retrieved and their types (see [below for nested schema](#nestedatt--input_option--bigquery_input_option--columns))
+- `gcs_uri` (String) GCS URI
+- `query` (String) Query
+- `temp_dataset` (String) Temporary dataset name
+
+Optional:
+
+- `bigquery_job_wait_second` (Number) Wait time in seconds until the BigQuery job completes
+- `cache` (Boolean) Flag whether query cache is enabled
+- `cleanup_gcs_files` (Boolean) Flag whether temporary GCS files should be cleaned up
+- `custom_variable_settings` (Attributes List) (see [below for nested schema](#nestedatt--input_option--bigquery_input_option--custom_variable_settings))
+- `decoder` (Attributes) (see [below for nested schema](#nestedatt--input_option--bigquery_input_option--decoder))
+- `file_format` (String) File format of temporary GCS files (`CSV` or `NEWLINE_DELIMITED_JSON`)
+- `gcs_uri_format` (String) Format of the GCS URI (`bucket` or `custom_path`)
+- `is_standard_sql` (Boolean) Flag whether standard SQL is enabled
+- `location` (String) Location of the BigQuery job
+
+
+### Nested Schema for `input_option.bigquery_input_option.columns`
+
+Required:
+
+- `name` (String) Column name
+- `type` (String) Column type
+
+Optional:
+
+- `format` (String) Column format
+
+
+
+### Nested Schema for `input_option.bigquery_input_option.custom_variable_settings`
+
+Required:
+
+- `name` (String) Custom variable name. It must start and end with `$`
+- `type` (String) Custom variable type. The following types are supported: `string`, `timestamp`, `timestamp_runtime`
+
+Optional:
+
+- `direction` (String) Direction of the diff from context_time. The following directions are supported: `ago`, `later`. Required in `timestamp` and `timestamp_runtime` types
+- `format` (String) Format used to replace variables. Required in `timestamp` and `timestamp_runtime` types
+- `quantity` (Number) Quantity used to calculate diff from context_time. Required in `timestamp` and `timestamp_runtime` types
+- `time_zone` (String) Time zone used to format the timestamp. Required in `timestamp` and `timestamp_runtime` types
+- `unit` (String) Time unit used to calculate diff from context_time. The following units are supported: `hour`, `date`, `month`. Required in `timestamp` and `timestamp_runtime` types
+- `value` (String) Fixed string which will replace variables at runtime. Required in `string` type
+
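+A `timestamp` custom variable that resolves to one day before the job's context time could look like the following (an illustrative sketch; the variable name and the strftime-style format string are assumptions):
+
+```terraform
+custom_variable_settings = [
+  {
+    name      = "$yesterday$"
+    type      = "timestamp"
+    quantity  = 1
+    unit      = "date"
+    direction = "ago"
+    format    = "%Y-%m-%d"
+    time_zone = "Asia/Tokyo"
+  }
+]
+```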
+
+
+### Nested Schema for `input_option.bigquery_input_option.decoder`
+
+Optional:
+
+- `match_name` (String) Relative path after decompression (regular expression). If not entered, all data in the compressed file will be transferred.
+
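+For instance, to transfer only the CSV entries from a compressed file, the decoder could be configured as follows (a hedged sketch; the pattern is illustrative):
+
+```terraform
+decoder = {
+  match_name = ".*\\.csv$"
+}
+```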
+
+
### Nested Schema for `input_option.gcs_input_option`
diff --git a/examples/resources/trocco_job_definition/input_options/bigquery_input_option.tf b/examples/resources/trocco_job_definition/input_options/bigquery_input_option.tf
new file mode 100644
index 00000000..b8b6e1a0
--- /dev/null
+++ b/examples/resources/trocco_job_definition/input_options/bigquery_input_option.tf
@@ -0,0 +1,25 @@
+resource "trocco_job_definition" "bigquery_input_example" {
+ input_option_type = "bigquery"
+ input_option = {
+ bigquery_input_option = {
+ bigquery_connection_id = 1
+ gcs_uri = "test_bucket"
+ gcs_uri_format = "bucket"
+ query = "SELECT * FROM `test_dataset.test_table`"
+ temp_dataset = "temp_dataset"
+ location = "asia-northeast1"
+ is_standard_sql = true
+ cleanup_gcs_files = true
+ file_format = "CSV"
+ cache = true
+ bigquery_job_wait_second = 600
+
+ columns = [
+ {
+ name = "col1__c"
+ type = "string"
+ }
+ ]
+ }
+ }
+}
diff --git a/internal/client/entity/job_definition/input_option/bigquery.go b/internal/client/entity/job_definition/input_option/bigquery.go
new file mode 100644
index 00000000..eb81eeea
--- /dev/null
+++ b/internal/client/entity/job_definition/input_option/bigquery.go
@@ -0,0 +1,28 @@
+package input_option
+
+import (
+ "terraform-provider-trocco/internal/client/entity"
+)
+
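+// BigqueryInputOption represents the BigQuery input option of a TROCCO job definition as returned by the API.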
+type BigqueryInputOption struct {
+ BigqueryConnectionID int64 `json:"bigquery_connection_id"`
+ GcsUri string `json:"gcs_uri"`
+ GcsUriFormat *string `json:"gcs_uri_format"`
+ Query string `json:"query"`
+ TempDataset string `json:"temp_dataset"`
+ IsStandardSQL *bool `json:"is_standard_sql"`
+ CleanupGcsFiles *bool `json:"cleanup_gcs_files"`
+ FileFormat *string `json:"file_format"`
+ Location *string `json:"location"`
+ Cache *bool `json:"cache"`
+ BigqueryJobWaitSecond *int64 `json:"bigquery_job_wait_second"`
+
+ Columns []BigqueryColumn `json:"columns"`
+ CustomVariableSettings *[]entity.CustomVariableSetting `json:"custom_variable_settings"`
+}
+
+type BigqueryColumn struct {
+ Name string `json:"name"`
+ Type string `json:"type"`
+ Format *string `json:"format"`
+}
diff --git a/internal/client/job_definition.go b/internal/client/job_definition.go
index 1c541409..569bb755 100644
--- a/internal/client/job_definition.go
+++ b/internal/client/job_definition.go
@@ -92,6 +92,7 @@ type InputOption struct {
SnowflakeInputOption *inputOptionEntities.SnowflakeInputOption `json:"snowflake_input_option"`
SalesforceInputOption *inputOptionEntities.SalesforceInputOption `json:"salesforce_input_option"`
GoogleSpreadsheetsInputOption *inputOptionEntities.GoogleSpreadsheetsInputOption `json:"google_spreadsheets_input_option"`
+ BigqueryInputOption *inputOptionEntities.BigqueryInputOption `json:"bigquery_input_option"`
}
type InputOptionInput struct {
@@ -100,6 +101,7 @@ type InputOptionInput struct {
SnowflakeInputOption *parameter.NullableObject[input_options.SnowflakeInputOptionInput] `json:"snowflake_input_option,omitempty"`
SalesforceInputOption *parameter.NullableObject[input_options.SalesforceInputOptionInput] `json:"salesforce_input_option,omitempty"`
GoogleSpreadsheetsInputOption *parameter.NullableObject[input_options.GoogleSpreadsheetsInputOptionInput] `json:"google_spreadsheets_input_option,omitempty"`
+ BigqueryInputOption *parameter.NullableObject[input_options.BigqueryInputOptionInput] `json:"bigquery_input_option,omitempty"`
}
type UpdateInputOptionInput struct {
@@ -108,6 +110,7 @@ type UpdateInputOptionInput struct {
SnowflakeInputOption *parameter.NullableObject[input_options.UpdateSnowflakeInputOptionInput] `json:"snowflake_input_option,omitempty"`
SalesforceInputOption *parameter.NullableObject[input_options.UpdateSalesforceInputOptionInput] `json:"salesforce_input_option,omitempty"`
GoogleSpreadsheetsInputOption *parameter.NullableObject[input_options.UpdateGoogleSpreadsheetsInputOptionInput] `json:"google_spreadsheets_input_option,omitempty"`
+ BigqueryInputOption *parameter.NullableObject[input_options.UpdateBigqueryInputOptionInput] `json:"bigquery_input_option,omitempty"`
}
type OutputOption struct {
diff --git a/internal/client/parameter/job_definition/input_option/bigquery.go b/internal/client/parameter/job_definition/input_option/bigquery.go
new file mode 100644
index 00000000..9889de9d
--- /dev/null
+++ b/internal/client/parameter/job_definition/input_option/bigquery.go
@@ -0,0 +1,46 @@
+package input_options
+
+import (
+ "terraform-provider-trocco/internal/client/parameter"
+ job_definitions "terraform-provider-trocco/internal/client/parameter/job_definition"
+)
+
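+// BigqueryInputOptionInput carries the BigQuery input option parameters for job definition create requests.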
+type BigqueryInputOptionInput struct {
+ BigqueryConnectionID int64 `json:"bigquery_connection_id"`
+ GcsUri string `json:"gcs_uri"`
+ GcsUriFormat *parameter.NullableString `json:"gcs_uri_format,omitempty"`
+ Query string `json:"query"`
+ TempDataset string `json:"temp_dataset"`
+ IsStandardSQL *bool `json:"is_standard_sql,omitempty"`
+ CleanupGcsFiles *bool `json:"cleanup_gcs_files,omitempty"`
+ FileFormat *parameter.NullableString `json:"file_format,omitempty"`
+ Location *parameter.NullableString `json:"location,omitempty"`
+ Cache *bool `json:"cache,omitempty"`
+ BigqueryJobWaitSecond *int64 `json:"bigquery_job_wait_second,omitempty"`
+ Columns []BigqueryColumn `json:"columns,omitempty"`
+ CustomVariableSettings *[]parameter.CustomVariableSettingInput `json:"custom_variable_settings,omitempty"`
+ Decoder *job_definitions.DecoderInput `json:"decoder,omitempty"`
+}
+
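+// UpdateBigqueryInputOptionInput carries the BigQuery input option parameters for job definition update requests; every field may be omitted.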
+type UpdateBigqueryInputOptionInput struct {
+ BigqueryConnectionID *int64 `json:"bigquery_connection_id,omitempty"`
+ GcsUri *parameter.NullableString `json:"gcs_uri,omitempty"`
+ GcsUriFormat *parameter.NullableString `json:"gcs_uri_format,omitempty"`
+ Query *parameter.NullableString `json:"query,omitempty"`
+ TempDataset *parameter.NullableString `json:"temp_dataset,omitempty"`
+ IsStandardSQL *bool `json:"is_standard_sql,omitempty"`
+ CleanupGcsFiles *bool `json:"cleanup_gcs_files,omitempty"`
+ FileFormat *parameter.NullableString `json:"file_format,omitempty"`
+ Location *parameter.NullableString `json:"location,omitempty"`
+ Cache *bool `json:"cache,omitempty"`
+ BigqueryJobWaitSecond *int64 `json:"bigquery_job_wait_second,omitempty"`
+ Columns []BigqueryColumn `json:"columns,omitempty"`
+ CustomVariableSettings *[]parameter.CustomVariableSettingInput `json:"custom_variable_settings,omitempty"`
+ Decoder *job_definitions.DecoderInput `json:"decoder,omitempty"`
+}
+
+type BigqueryColumn struct {
+ Name string `json:"name"`
+ Type string `json:"type"`
+ Format *string `json:"format"`
+}
diff --git a/internal/provider/model/job_definition/input_option.go b/internal/provider/model/job_definition/input_option.go
index 4f2d54eb..1de8290b 100644
--- a/internal/provider/model/job_definition/input_option.go
+++ b/internal/provider/model/job_definition/input_option.go
@@ -12,6 +12,7 @@ type InputOption struct {
SnowflakeInputOption *input_options.SnowflakeInputOption `tfsdk:"snowflake_input_option"`
SalesforceInputOption *input_options.SalesforceInputOption `tfsdk:"salesforce_input_option"`
GoogleSpreadsheetsInputOption *input_options.GoogleSpreadsheetsInputOption `tfsdk:"google_spreadsheets_input_option"`
+ BigqueryInputOption *input_options.BigqueryInputOption `tfsdk:"bigquery_input_option"`
}
func NewInputOption(inputOption client.InputOption) *InputOption {
@@ -21,6 +22,7 @@ func NewInputOption(inputOption client.InputOption) *InputOption {
SnowflakeInputOption: input_options.NewSnowflakeInputOption(inputOption.SnowflakeInputOption),
SalesforceInputOption: input_options.NewSalesforceInputOption(inputOption.SalesforceInputOption),
GoogleSpreadsheetsInputOption: input_options.NewGoogleSpreadsheetsInputOption(inputOption.GoogleSpreadsheetsInputOption),
+ BigqueryInputOption: input_options.NewBigqueryInputOption(inputOption.BigqueryInputOption),
}
}
@@ -31,6 +33,7 @@ func (o InputOption) ToInput() client.InputOptionInput {
SnowflakeInputOption: model.WrapObject(o.SnowflakeInputOption.ToInput()),
SalesforceInputOption: model.WrapObject(o.SalesforceInputOption.ToInput()),
GoogleSpreadsheetsInputOption: model.WrapObject(o.GoogleSpreadsheetsInputOption.ToInput()),
+ BigqueryInputOption: model.WrapObject(o.BigqueryInputOption.ToInput()),
}
}
@@ -41,5 +44,6 @@ func (o InputOption) ToUpdateInput() *client.UpdateInputOptionInput {
SnowflakeInputOption: model.WrapObject(o.SnowflakeInputOption.ToUpdateInput()),
SalesforceInputOption: model.WrapObject(o.SalesforceInputOption.ToUpdateInput()),
GoogleSpreadsheetsInputOption: model.WrapObject(o.GoogleSpreadsheetsInputOption.ToUpdateInput()),
+ BigqueryInputOption: model.WrapObject(o.BigqueryInputOption.ToUpdateInput()),
}
}
diff --git a/internal/provider/model/job_definition/input_option/bigquery.go b/internal/provider/model/job_definition/input_option/bigquery.go
new file mode 100644
index 00000000..ec93ac7f
--- /dev/null
+++ b/internal/provider/model/job_definition/input_option/bigquery.go
@@ -0,0 +1,135 @@
+package input_options
+
+import (
+ "terraform-provider-trocco/internal/client/entity/job_definition/input_option"
+ param "terraform-provider-trocco/internal/client/parameter/job_definition/input_option"
+ "terraform-provider-trocco/internal/provider/model"
+
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+type BigqueryInputOption struct {
+ BigqueryConnectionID types.Int64 `tfsdk:"bigquery_connection_id"`
+ GcsUri types.String `tfsdk:"gcs_uri"`
+ GcsUriFormat types.String `tfsdk:"gcs_uri_format"`
+ Query types.String `tfsdk:"query"`
+ TempDataset types.String `tfsdk:"temp_dataset"`
+ IsStandardSQL types.Bool `tfsdk:"is_standard_sql"`
+ CleanupGcsFiles types.Bool `tfsdk:"cleanup_gcs_files"`
+ FileFormat types.String `tfsdk:"file_format"`
+ Location types.String `tfsdk:"location"`
+ Cache types.Bool `tfsdk:"cache"`
+ BigqueryJobWaitSecond types.Int64 `tfsdk:"bigquery_job_wait_second"`
+
+ Columns []BigqueryColumn `tfsdk:"columns"`
+ CustomVariableSettings *[]model.CustomVariableSetting `tfsdk:"custom_variable_settings"`
+ Decoder *Decoder `tfsdk:"decoder"`
+}
+
+type BigqueryColumn struct {
+ Name types.String `tfsdk:"name"`
+ Type types.String `tfsdk:"type"`
+ Format types.String `tfsdk:"format"`
+}
+
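+// NewBigqueryInputOption converts the API entity into the Terraform model, returning nil when the entity is absent.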
+func NewBigqueryInputOption(bigqueryInputOption *input_option.BigqueryInputOption) *BigqueryInputOption {
+ if bigqueryInputOption == nil {
+ return nil
+ }
+ return &BigqueryInputOption{
+ BigqueryConnectionID: types.Int64Value(bigqueryInputOption.BigqueryConnectionID),
+ GcsUri: types.StringValue(bigqueryInputOption.GcsUri),
+ GcsUriFormat: types.StringPointerValue(bigqueryInputOption.GcsUriFormat),
+ Query: types.StringValue(bigqueryInputOption.Query),
+ TempDataset: types.StringValue(bigqueryInputOption.TempDataset),
+ IsStandardSQL: types.BoolPointerValue(bigqueryInputOption.IsStandardSQL),
+ CleanupGcsFiles: types.BoolPointerValue(bigqueryInputOption.CleanupGcsFiles),
+ FileFormat: types.StringPointerValue(bigqueryInputOption.FileFormat),
+ Location: types.StringPointerValue(bigqueryInputOption.Location),
+ Cache: types.BoolPointerValue(bigqueryInputOption.Cache),
+ BigqueryJobWaitSecond: types.Int64PointerValue(bigqueryInputOption.BigqueryJobWaitSecond),
+
+ Columns: newBigqueryColumns(bigqueryInputOption.Columns),
+ CustomVariableSettings: model.NewCustomVariableSettings(bigqueryInputOption.CustomVariableSettings),
+ }
+}
+
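+// ToInput converts the Terraform model into create-request parameters.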
+func (bigqueryInputOption *BigqueryInputOption) ToInput() *param.BigqueryInputOptionInput {
+ if bigqueryInputOption == nil {
+ return nil
+ }
+
+ return &param.BigqueryInputOptionInput{
+ BigqueryConnectionID: bigqueryInputOption.BigqueryConnectionID.ValueInt64(),
+ GcsUri: bigqueryInputOption.GcsUri.ValueString(),
+ GcsUriFormat: model.NewNullableString(bigqueryInputOption.GcsUriFormat),
+ Query: bigqueryInputOption.Query.ValueString(),
+ TempDataset: bigqueryInputOption.TempDataset.ValueString(),
+ IsStandardSQL: bigqueryInputOption.IsStandardSQL.ValueBoolPointer(),
+ CleanupGcsFiles: bigqueryInputOption.CleanupGcsFiles.ValueBoolPointer(),
+ FileFormat: model.NewNullableString(bigqueryInputOption.FileFormat),
+ Location: model.NewNullableString(bigqueryInputOption.Location),
+ Cache: bigqueryInputOption.Cache.ValueBoolPointer(),
+ BigqueryJobWaitSecond: bigqueryInputOption.BigqueryJobWaitSecond.ValueInt64Pointer(),
+
+ Columns: toBigqueryColumnsInput(bigqueryInputOption.Columns),
+ CustomVariableSettings: model.ToCustomVariableSettingInputs(bigqueryInputOption.CustomVariableSettings),
+ Decoder: bigqueryInputOption.Decoder.ToDecoderInput(),
+ }
+}
+
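+// ToUpdateInput converts the Terraform model into update-request parameters.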
+func (bigqueryInputOption *BigqueryInputOption) ToUpdateInput() *param.UpdateBigqueryInputOptionInput {
+ if bigqueryInputOption == nil {
+ return nil
+ }
+
+ return &param.UpdateBigqueryInputOptionInput{
+ BigqueryConnectionID: bigqueryInputOption.BigqueryConnectionID.ValueInt64Pointer(),
+ GcsUri: model.NewNullableString(bigqueryInputOption.GcsUri),
+ GcsUriFormat: model.NewNullableString(bigqueryInputOption.GcsUriFormat),
+ Query: model.NewNullableString(bigqueryInputOption.Query),
+ TempDataset: model.NewNullableString(bigqueryInputOption.TempDataset),
+ IsStandardSQL: bigqueryInputOption.IsStandardSQL.ValueBoolPointer(),
+ CleanupGcsFiles: bigqueryInputOption.CleanupGcsFiles.ValueBoolPointer(),
+ FileFormat: model.NewNullableString(bigqueryInputOption.FileFormat),
+ Location: model.NewNullableString(bigqueryInputOption.Location),
+ Cache: bigqueryInputOption.Cache.ValueBoolPointer(),
+ BigqueryJobWaitSecond: bigqueryInputOption.BigqueryJobWaitSecond.ValueInt64Pointer(),
+
+ Columns: toBigqueryColumnsInput(bigqueryInputOption.Columns),
+ CustomVariableSettings: model.ToCustomVariableSettingInputs(bigqueryInputOption.CustomVariableSettings),
+ Decoder: bigqueryInputOption.Decoder.ToDecoderInput(),
+ }
+}
+
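+// newBigqueryColumns converts API column entities into Terraform model columns.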
+func newBigqueryColumns(bigqueryColumns []input_option.BigqueryColumn) []BigqueryColumn {
+ if bigqueryColumns == nil {
+ return nil
+ }
+ columns := make([]BigqueryColumn, 0, len(bigqueryColumns))
+ for _, input := range bigqueryColumns {
+ column := BigqueryColumn{
+ Name: types.StringValue(input.Name),
+ Type: types.StringValue(input.Type),
+ Format: types.StringPointerValue(input.Format),
+ }
+ columns = append(columns, column)
+ }
+ return columns
+}
+
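+// toBigqueryColumnsInput converts Terraform model columns into request parameters.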
+func toBigqueryColumnsInput(columns []BigqueryColumn) []param.BigqueryColumn {
+ if columns == nil {
+ return nil
+ }
+
+ inputs := make([]param.BigqueryColumn, 0, len(columns))
+ for _, column := range columns {
+ inputs = append(inputs, param.BigqueryColumn{
+ Name: column.Name.ValueString(),
+ Type: column.Type.ValueString(),
+ Format: column.Format.ValueStringPointer(),
+ })
+ }
+ return inputs
+}
diff --git a/internal/provider/schema/job_definition/bigquery_input_option.go b/internal/provider/schema/job_definition/bigquery_input_option.go
new file mode 100644
index 00000000..2154b307
--- /dev/null
+++ b/internal/provider/schema/job_definition/bigquery_input_option.go
@@ -0,0 +1,134 @@
+package job_definition
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+)
+
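+// BigqueryInputOptionSchema returns the resource schema for the bigquery_input_option attribute, including defaults and validators.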
+func BigqueryInputOptionSchema() schema.Attribute {
+ return schema.SingleNestedAttribute{
+ Optional: true,
+ MarkdownDescription: "Attributes about source bigquery",
+ Attributes: map[string]schema.Attribute{
+ "bigquery_connection_id": schema.Int64Attribute{
+ Required: true,
+ MarkdownDescription: "Id of bigquery connection",
+ Validators: []validator.Int64{
+ int64validator.AtLeast(1),
+ },
+ },
+ "gcs_uri": schema.StringAttribute{
+ Required: true,
+ Validators: []validator.String{
+ stringvalidator.UTF8LengthAtLeast(1),
+ },
+ MarkdownDescription: "GCS URI",
+ },
+ "gcs_uri_format": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Default: stringdefault.StaticString("bucket"),
+ Validators: []validator.String{
+ stringvalidator.OneOf("bucket", "custom_path"),
+ },
+ MarkdownDescription: "Format of GCS URI",
+ },
+ "query": schema.StringAttribute{
+ Required: true,
+ Validators: []validator.String{
+ stringvalidator.UTF8LengthAtLeast(1),
+ },
+ MarkdownDescription: "Query",
+ },
+ "temp_dataset": schema.StringAttribute{
+ Required: true,
+ Validators: []validator.String{
+ stringvalidator.UTF8LengthAtLeast(1),
+ },
+ MarkdownDescription: "Temporary dataset name",
+ },
+ "is_standard_sql": schema.BoolAttribute{
+ Optional: true,
+ Computed: true,
+ Default: booldefault.StaticBool(true),
+ MarkdownDescription: "Flag whether standard SQL is enabled",
+ },
+ "cleanup_gcs_files": schema.BoolAttribute{
+ Optional: true,
+ Computed: true,
+ Default: booldefault.StaticBool(true),
+ MarkdownDescription: "Flag whether temporary GCS files should be cleaned up",
+ },
+ "file_format": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Default: stringdefault.StaticString("CSV"),
+ Validators: []validator.String{
+ stringvalidator.OneOf("CSV", "NEWLINE_DELIMITED_JSON"),
+ },
+ MarkdownDescription: "File format of temporary GCS files",
+ },
+ "location": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Default: stringdefault.StaticString("US"),
+ Validators: []validator.String{
+ stringvalidator.UTF8LengthAtLeast(1),
+ },
+ MarkdownDescription: "Location of bigquery job",
+ },
+ "cache": schema.BoolAttribute{
+ Optional: true,
+ Computed: true,
+ Default: booldefault.StaticBool(true),
+ MarkdownDescription: "Flag whether query cache is enabled",
+ },
+ "bigquery_job_wait_second": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Default: int64default.StaticInt64(600),
+ MarkdownDescription: "Wait time in seconds until bigquery job is completed",
+ Validators: []validator.Int64{
+ int64validator.AtLeast(1),
+ },
+ },
+ "columns": schema.ListNestedAttribute{
+ Required: true,
+ MarkdownDescription: "List of columns to be retrieved and their types",
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "name": schema.StringAttribute{
+ Required: true,
+ Validators: []validator.String{
+ stringvalidator.UTF8LengthAtLeast(1),
+ },
+ MarkdownDescription: "Column name",
+ },
+ "type": schema.StringAttribute{
+ Required: true,
+ MarkdownDescription: "Column type.",
+ Validators: []validator.String{
+ stringvalidator.OneOf("boolean", "long", "timestamp", "double", "string", "json"),
+ },
+ },
+ "format": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: "format",
+ },
+ },
+ },
+ Validators: []validator.List{
+ listvalidator.SizeAtLeast(1),
+ },
+ },
+ "decoder": DecoderSchema(),
+ "custom_variable_settings": CustomVariableSettingsSchema(),
+ },
+ }
+}
diff --git a/internal/provider/schema/job_definition/input_option.go b/internal/provider/schema/job_definition/input_option.go
index e08c2bea..412b2b40 100644
--- a/internal/provider/schema/job_definition/input_option.go
+++ b/internal/provider/schema/job_definition/input_option.go
@@ -16,6 +16,7 @@ func InputOptionSchema() schema.Attribute {
"snowflake_input_option": SnowflakeInputOptionSchema(),
"salesforce_input_option": SalesforceInputOptionSchema(),
"google_spreadsheets_input_option": GoogleSpreadsheetsInputOptionSchema(),
+ "bigquery_input_option": BigqueryInputOptionSchema(),
},
PlanModifiers: []planmodifier.Object{
&planmodifier2.InputOptionPlanModifier{},
diff --git a/templates/resources/job_definition.md.tmpl b/templates/resources/job_definition.md.tmpl
index a24b6934..0a5ea37e 100644
--- a/templates/resources/job_definition.md.tmpl
+++ b/templates/resources/job_definition.md.tmpl
@@ -100,6 +100,10 @@ Minimum configuration
### InputOptions
+#### BigqueryInputOption
+
+{{codefile "terraform" "examples/resources/trocco_job_definition/input_options/bigquery_input_option.tf"}}
+
#### MysqlInputOption
{{codefile "terraform" "examples/resources/trocco_job_definition/input_options/mysql_input_option.tf"}}