From c892da656c3c2a4efd198bfe5e0ed9581a64a293 Mon Sep 17 00:00:00 2001 From: SevenEarth <391613297@qq.com> Date: Tue, 28 Oct 2025 17:12:59 +0800 Subject: [PATCH 1/5] add --- tencentcloud/provider.go | 2 + tencentcloud/provider.md | 2 + ...urce_tc_dlc_datasource_house_attachment.go | 1890 +++++++++++++++++ ...urce_tc_dlc_datasource_house_attachment.md | 27 + ...tc_dlc_datasource_house_attachment_test.go | 79 + ...e_tc_dlc_standard_engine_resource_group.go | 229 ++ ...e_tc_dlc_standard_engine_resource_group.md | 4 + ...ndard_engine_resource_group_config_info.go | 816 +++++++ ...ndard_engine_resource_group_config_info.md | 39 + ..._engine_resource_group_config_info_test.go | 87 + .../services/dlc/service_tencentcloud_dlc.go | 87 + ..._datasource_house_attachment.html.markdown | 278 +++ ...andard_engine_resource_group.html.markdown | 5 + ...e_resource_group_config_info.html.markdown | 85 + website/tencentcloud.erb | 6 + 15 files changed, 3636 insertions(+) create mode 100644 tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.go create mode 100644 tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.md create mode 100644 tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment_test.go create mode 100644 tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.go create mode 100644 tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.md create mode 100644 tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info_test.go create mode 100644 website/docs/r/dlc_datasource_house_attachment.html.markdown create mode 100644 website/docs/r/dlc_standard_engine_resource_group_config_info.html.markdown diff --git a/tencentcloud/provider.go b/tencentcloud/provider.go index 0ee049fe6e..b6a120bbe8 100644 --- a/tencentcloud/provider.go +++ b/tencentcloud/provider.go @@ -2293,6 +2293,8 @@ func Provider() *schema.Provider { 
"tencentcloud_dlc_standard_engine_resource_group": dlc.ResourceTencentCloudDlcStandardEngineResourceGroup(), "tencentcloud_dlc_data_mask_strategy": dlc.ResourceTencentCloudDlcDataMaskStrategy(), "tencentcloud_dlc_attach_data_mask_policy": dlc.ResourceTencentCloudDlcAttachDataMaskPolicy(), + "tencentcloud_dlc_standard_engine_resource_group_config_info": dlc.ResourceTencentCloudDlcStandardEngineResourceGroupConfigInfo(), + "tencentcloud_dlc_datasource_house_attachment": dlc.ResourceTencentCloudDlcDatasourceHouseAttachment(), "tencentcloud_waf_custom_rule": waf.ResourceTencentCloudWafCustomRule(), "tencentcloud_waf_custom_white_rule": waf.ResourceTencentCloudWafCustomWhiteRule(), "tencentcloud_waf_clb_domain": waf.ResourceTencentCloudWafClbDomain(), diff --git a/tencentcloud/provider.md b/tencentcloud/provider.md index 91e1b1a06c..fce80062e3 100644 --- a/tencentcloud/provider.md +++ b/tencentcloud/provider.md @@ -2175,6 +2175,8 @@ tencentcloud_dlc_user_vpc_connection tencentcloud_dlc_standard_engine_resource_group tencentcloud_dlc_data_mask_strategy tencentcloud_dlc_attach_data_mask_policy +tencentcloud_dlc_standard_engine_resource_group_config_info +tencentcloud_dlc_datasource_house_attachment Web Application Firewall(WAF) Data Source diff --git a/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.go b/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.go new file mode 100644 index 0000000000..3c35cd1c64 --- /dev/null +++ b/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.go @@ -0,0 +1,1890 @@ +package dlc + +import ( + "context" + "fmt" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + dlcv20210125 "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dlc/v20210125" + + tccommon "github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/common" + 
"github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/internal/helper" +) + +func ResourceTencentCloudDlcDatasourceHouseAttachment() *schema.Resource { + return &schema.Resource{ + Create: resourceTencentCloudDlcDatasourceHouseAttachmentCreate, + Read: resourceTencentCloudDlcDatasourceHouseAttachmentRead, + Update: resourceTencentCloudDlcDatasourceHouseAttachmentUpdate, + Delete: resourceTencentCloudDlcDatasourceHouseAttachmentDelete, + Schema: map[string]*schema.Schema{ + "datasource_connection_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Network configuration name.", + }, + + "datasource_connection_type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Data source type. Allow value: Mysql, HiveCos, HiveHdfs, HiveCHdfs, Kafka, OtherDatasourceConnection, PostgreSql, SqlServer, ClickHouse, Elasticsearch, TDSQLPostgreSql, TCHouseD, TccHive.", + }, + + "datasource_connection_config": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: "Data source network configuration.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "mysql": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of MySQL data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "jdbc_url": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "JDBC URL for connecting to MySQL.", + }, + "user": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Username.", + }, + "password": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "MySQL password.", + }, + "location": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: "Network information for MySQL data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: 
schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Database name.", + }, + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Database instance ID, consistent with the database side.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Database instance name, consistent with the database side.", + }, + }, + }, + }, + "hive": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of Hive data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "meta_store_url": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Address of Hive metastore.", + }, + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Hive data source type, representing data storage location, COS or HDFS.", + }, + "location": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: "Private network information where the data source is located.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, 
+ "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "user": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "If the type is HDFS, a username is required.", + }, + "high_availability": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: "If the type is HDFS, high availability needs to be selected.", + }, + "bucket_url": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "If the type is COS, COS bucket connection needs to be filled in.", + }, + "hdfs_properties": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "JSON string. 
If the type is HDFS, this field needs to be filled in.", + }, + "mysql": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Metadata database information for Hive.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "jdbc_url": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "JDBC URL for connecting to MySQL.", + }, + "user": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Username.", + }, + "password": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "MySQL password.", + }, + "location": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: "Network information for MySQL data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Database name.", + }, + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Database instance ID, consistent with the database side.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Database instance name, consistent with the database side.", + }, + }, + }, + }, + "instance_id": { + Type: schema.TypeString, + Optional: 
true, + ForceNew: true, + Description: "EMR cluster ID.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "EMR cluster name.", + }, + "hive_version": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Version number of Hive component in EMR cluster.", + }, + "kerberos_info": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Kerberos details.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "krb5_conf": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Krb5Conf file value.", + }, + "key_tab": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "KeyTab file value.", + }, + "service_principal": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Service principal.", + }, + }, + }, + }, + "kerberos_enable": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: "Whether to enable Kerberos.", + }, + }, + }, + }, + "kafka": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of Kafka data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Kafka instance ID.", + }, + "location": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: "Network information for Kafka data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + 
ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + }, + }, + }, + "other_datasource_connection": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of other data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: "Network parameters.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + }, + }, + }, + "postgre_sql": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of PostgreSQL data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Unique ID of the data source instance.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Name of the data source.", + }, + "jdbc_url": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "JDBC access link for the data source.", + }, 
+ "user": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Username for accessing the data source.", + }, + "password": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source access password, requires base64 encoding.", + }, + "location": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "VPC and subnet information for the data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Default database name.", + }, + }, + }, + }, + "sql_server": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of SQLServer data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Unique ID of the data source instance.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Name of the data source.", + }, + "jdbc_url": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "JDBC access link for the data source.", + }, + "user": { + Type: schema.TypeString, + Optional: true, + ForceNew: 
true, + Description: "Username for accessing the data source.", + }, + "password": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source access password, requires base64 encoding.", + }, + "location": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "VPC and subnet information for the data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Default database name.", + }, + }, + }, + }, + "click_house": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of ClickHouse data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Unique ID of the data source instance.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Name of the data source.", + }, + "jdbc_url": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "JDBC access link for the data source.", + }, + "user": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Username for accessing the data source.", + }, + 
"password": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source access password, requires base64 encoding.", + }, + "location": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "VPC and subnet information for the data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Default database name.", + }, + }, + }, + }, + "elasticsearch": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of Elasticsearch data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source ID.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source name.", + }, + "user": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Username.", + }, + "password": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Password, requires base64 encoding.", + }, + "location": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "VPC and subnet 
information for the data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Default database name.", + }, + "service_info": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: "IP and port information for accessing Elasticsearch.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "ip": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "IP information.", + }, + "port": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + Description: "Port information.", + }, + }, + }, + }, + }, + }, + }, + "tdsql_postgre_sql": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of TDSQL-PostgreSQL data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Unique ID of the data source instance.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Name of the data source.", + }, + "jdbc_url": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "JDBC access link for the data source.", + }, + "user": { + Type: 
schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Username for accessing the data source.", + }, + "password": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source access password, requires base64 encoding.", + }, + "location": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "VPC and subnet information for the data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Default database name.", + }, + }, + }, + }, + "tc_house_d": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Properties of Doris data source connection.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Unique ID of the data source instance.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source name.", + }, + "jdbc_url": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "JDBC of the data source.", + }, + "user": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "User for accessing the 
data source.", + }, + "password": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Data source access password, requires base64 encoding.", + }, + "location": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "VPC and subnet information for the data source.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "vpc_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "VPC IPv4 CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Subnet IPv4 CIDR.", + }, + }, + }, + }, + "db_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Default database name.", + }, + "access_info": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Access information.", + }, + }, + }, + }, + "tcc_hive": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "TccHive data catalog connection information.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Instance ID.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Instance name.", + }, + "endpoint_service_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Endpoint service ID.", + }, + "meta_store_url": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Thrift 
connection address.", + }, + "hive_version": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Hive version.", + }, + "tcc_connection": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Description: "Network information.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "clb_ip": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Service CLB IP.", + }, + "clb_port": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Service CLB port.", + }, + "vpc_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "VPC instance ID.", + }, + "vpc_cidr_block": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "VPC CIDR.", + }, + "subnet_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Subnet instance ID.", + }, + "subnet_cidr_block": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Subnet CIDR.", + }, + }, + }, + }, + "hms_endpoint_service_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "HMS endpoint service ID.", + }, + }, + }, + }, + }, + }, + }, + + "data_engine_names": { + Type: schema.TypeSet, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: "Engine name, only one engine can be bound.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "network_connection_type": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + Description: "Network type, 2-cross-source type, 4-enhanced type.", + }, + + "network_connection_desc": { + Type: schema.TypeString, + Optional: true, + Description: "Network configuration description.", + }, + }, + } +} + +func resourceTencentCloudDlcDatasourceHouseAttachmentCreate(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_datasource_house_attachment.create")() 
+ defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + request = dlcv20210125.NewAssociateDatasourceHouseRequest() + datasourceConnectionName string + ) + + if v, ok := d.GetOk("datasource_connection_name"); ok { + request.DatasourceConnectionName = helper.String(v.(string)) + datasourceConnectionName = v.(string) + } + + if v, ok := d.GetOk("datasource_connection_type"); ok { + request.DatasourceConnectionType = helper.String(v.(string)) + } + + if datasourceConnectionConfigMap, ok := helper.InterfacesHeadMap(d, "datasource_connection_config"); ok { + datasourceConnectionConfig := dlcv20210125.DatasourceConnectionConfig{} + if mysqlMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["mysql"]); ok { + mysqlInfo := dlcv20210125.MysqlInfo{} + if v, ok := mysqlMap["jdbc_url"].(string); ok { + mysqlInfo.JdbcUrl = helper.String(v) + } + + if v, ok := mysqlMap["user"].(string); ok { + mysqlInfo.User = helper.String(v) + } + + if v, ok := mysqlMap["password"].(string); ok { + mysqlInfo.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(mysqlMap["location"]); ok { + datasourceConnectionLocation := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation.SubnetCidrBlock = helper.String(v) + } + + mysqlInfo.Location = &datasourceConnectionLocation + } + + if v, ok := mysqlMap["db_name"].(string); ok { + mysqlInfo.DbName = helper.String(v) 
+ } + + if v, ok := mysqlMap["instance_id"].(string); ok { + mysqlInfo.InstanceId = helper.String(v) + } + + if v, ok := mysqlMap["instance_name"].(string); ok { + mysqlInfo.InstanceName = helper.String(v) + } + + datasourceConnectionConfig.Mysql = &mysqlInfo + } + + if hiveMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["hive"]); ok { + hiveInfo := dlcv20210125.HiveInfo{} + if v, ok := hiveMap["meta_store_url"].(string); ok { + hiveInfo.MetaStoreUrl = helper.String(v) + } + + if v, ok := hiveMap["type"].(string); ok { + hiveInfo.Type = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(hiveMap["location"]); ok { + datasourceConnectionLocation2 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation2.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation2.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation2.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation2.SubnetCidrBlock = helper.String(v) + } + + hiveInfo.Location = &datasourceConnectionLocation2 + } + + if v, ok := hiveMap["user"].(string); ok { + hiveInfo.User = helper.String(v) + } + + if v, ok := hiveMap["high_availability"].(bool); ok { + hiveInfo.HighAvailability = helper.Bool(v) + } + + if v, ok := hiveMap["bucket_url"].(string); ok { + hiveInfo.BucketUrl = helper.String(v) + } + + if v, ok := hiveMap["hdfs_properties"].(string); ok { + hiveInfo.HdfsProperties = helper.String(v) + } + + if mysqlMap, ok := helper.ConvertInterfacesHeadToMap(hiveMap["mysql"]); ok { + mysqlInfo2 := dlcv20210125.MysqlInfo{} + if v, ok := mysqlMap["jdbc_url"].(string); ok { + mysqlInfo2.JdbcUrl = helper.String(v) + } + + if v, ok := mysqlMap["user"].(string); ok { + mysqlInfo2.User = 
helper.String(v) + } + + if v, ok := mysqlMap["password"].(string); ok { + mysqlInfo2.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(mysqlMap["location"]); ok { + datasourceConnectionLocation3 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation3.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation3.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation3.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation3.SubnetCidrBlock = helper.String(v) + } + + mysqlInfo2.Location = &datasourceConnectionLocation3 + } + + if v, ok := mysqlMap["db_name"].(string); ok { + mysqlInfo2.DbName = helper.String(v) + } + + if v, ok := mysqlMap["instance_id"].(string); ok { + mysqlInfo2.InstanceId = helper.String(v) + } + + if v, ok := mysqlMap["instance_name"].(string); ok { + mysqlInfo2.InstanceName = helper.String(v) + } + + hiveInfo.Mysql = &mysqlInfo2 + } + + if v, ok := hiveMap["instance_id"].(string); ok { + hiveInfo.InstanceId = helper.String(v) + } + + if v, ok := hiveMap["instance_name"].(string); ok { + hiveInfo.InstanceName = helper.String(v) + } + + if v, ok := hiveMap["hive_version"].(string); ok { + hiveInfo.HiveVersion = helper.String(v) + } + + if kerberosInfoMap, ok := helper.ConvertInterfacesHeadToMap(hiveMap["kerberos_info"]); ok { + kerberosInfo := dlcv20210125.KerberosInfo{} + if v, ok := kerberosInfoMap["krb5_conf"].(string); ok { + kerberosInfo.Krb5Conf = helper.String(v) + } + + if v, ok := kerberosInfoMap["key_tab"].(string); ok { + kerberosInfo.KeyTab = helper.String(v) + } + + if v, ok := kerberosInfoMap["service_principal"].(string); ok { + kerberosInfo.ServicePrincipal = helper.String(v) + } + + hiveInfo.KerberosInfo = &kerberosInfo 
+ } + + if v, ok := hiveMap["kerberos_enable"].(bool); ok { + hiveInfo.KerberosEnable = helper.Bool(v) + } + + datasourceConnectionConfig.Hive = &hiveInfo + } + + if kafkaMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["kafka"]); ok { + kafkaInfo := dlcv20210125.KafkaInfo{} + if v, ok := kafkaMap["instance_id"].(string); ok { + kafkaInfo.InstanceId = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(kafkaMap["location"]); ok { + datasourceConnectionLocation4 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation4.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation4.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation4.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation4.SubnetCidrBlock = helper.String(v) + } + + kafkaInfo.Location = &datasourceConnectionLocation4 + } + + datasourceConnectionConfig.Kafka = &kafkaInfo + } + + if otherDatasourceConnectionMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["other_datasource_connection"]); ok { + otherDatasourceConnection := dlcv20210125.OtherDatasourceConnection{} + if locationMap, ok := helper.ConvertInterfacesHeadToMap(otherDatasourceConnectionMap["location"]); ok { + datasourceConnectionLocation5 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation5.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation5.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation5.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); 
ok { + datasourceConnectionLocation5.SubnetCidrBlock = helper.String(v) + } + + otherDatasourceConnection.Location = &datasourceConnectionLocation5 + } + + datasourceConnectionConfig.OtherDatasourceConnection = &otherDatasourceConnection + } + + if postgreSqlMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["postgre_sql"]); ok { + dataSourceInfo := dlcv20210125.DataSourceInfo{} + if v, ok := postgreSqlMap["instance_id"].(string); ok { + dataSourceInfo.InstanceId = helper.String(v) + } + + if v, ok := postgreSqlMap["instance_name"].(string); ok { + dataSourceInfo.InstanceName = helper.String(v) + } + + if v, ok := postgreSqlMap["jdbc_url"].(string); ok { + dataSourceInfo.JdbcUrl = helper.String(v) + } + + if v, ok := postgreSqlMap["user"].(string); ok { + dataSourceInfo.User = helper.String(v) + } + + if v, ok := postgreSqlMap["password"].(string); ok { + dataSourceInfo.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(postgreSqlMap["location"]); ok { + datasourceConnectionLocation6 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation6.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation6.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation6.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation6.SubnetCidrBlock = helper.String(v) + } + + dataSourceInfo.Location = &datasourceConnectionLocation6 + } + + if v, ok := postgreSqlMap["db_name"].(string); ok { + dataSourceInfo.DbName = helper.String(v) + } + + datasourceConnectionConfig.PostgreSql = &dataSourceInfo + } + + if sqlServerMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["sql_server"]); ok { + dataSourceInfo2 := 
dlcv20210125.DataSourceInfo{} + if v, ok := sqlServerMap["instance_id"].(string); ok { + dataSourceInfo2.InstanceId = helper.String(v) + } + + if v, ok := sqlServerMap["instance_name"].(string); ok { + dataSourceInfo2.InstanceName = helper.String(v) + } + + if v, ok := sqlServerMap["jdbc_url"].(string); ok { + dataSourceInfo2.JdbcUrl = helper.String(v) + } + + if v, ok := sqlServerMap["user"].(string); ok { + dataSourceInfo2.User = helper.String(v) + } + + if v, ok := sqlServerMap["password"].(string); ok { + dataSourceInfo2.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(sqlServerMap["location"]); ok { + datasourceConnectionLocation7 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation7.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation7.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation7.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation7.SubnetCidrBlock = helper.String(v) + } + + dataSourceInfo2.Location = &datasourceConnectionLocation7 + } + + if v, ok := sqlServerMap["db_name"].(string); ok { + dataSourceInfo2.DbName = helper.String(v) + } + + datasourceConnectionConfig.SqlServer = &dataSourceInfo2 + } + + if clickHouseMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["click_house"]); ok { + dataSourceInfo3 := dlcv20210125.DataSourceInfo{} + if v, ok := clickHouseMap["instance_id"].(string); ok { + dataSourceInfo3.InstanceId = helper.String(v) + } + + if v, ok := clickHouseMap["instance_name"].(string); ok { + dataSourceInfo3.InstanceName = helper.String(v) + } + + if v, ok := clickHouseMap["jdbc_url"].(string); ok { + dataSourceInfo3.JdbcUrl = helper.String(v) + } + + if v, ok := 
clickHouseMap["user"].(string); ok { + dataSourceInfo3.User = helper.String(v) + } + + if v, ok := clickHouseMap["password"].(string); ok { + dataSourceInfo3.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(clickHouseMap["location"]); ok { + datasourceConnectionLocation8 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation8.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation8.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation8.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation8.SubnetCidrBlock = helper.String(v) + } + + dataSourceInfo3.Location = &datasourceConnectionLocation8 + } + + if v, ok := clickHouseMap["db_name"].(string); ok { + dataSourceInfo3.DbName = helper.String(v) + } + + datasourceConnectionConfig.ClickHouse = &dataSourceInfo3 + } + + if elasticsearchMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["elasticsearch"]); ok { + elasticsearchInfo := dlcv20210125.ElasticsearchInfo{} + if v, ok := elasticsearchMap["instance_id"].(string); ok { + elasticsearchInfo.InstanceId = helper.String(v) + } + + if v, ok := elasticsearchMap["instance_name"].(string); ok { + elasticsearchInfo.InstanceName = helper.String(v) + } + + if v, ok := elasticsearchMap["user"].(string); ok { + elasticsearchInfo.User = helper.String(v) + } + + if v, ok := elasticsearchMap["password"].(string); ok { + elasticsearchInfo.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(elasticsearchMap["location"]); ok { + datasourceConnectionLocation9 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation9.VpcId = 
helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation9.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation9.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation9.SubnetCidrBlock = helper.String(v) + } + + elasticsearchInfo.Location = &datasourceConnectionLocation9 + } + + if v, ok := elasticsearchMap["db_name"].(string); ok { + elasticsearchInfo.DbName = helper.String(v) + } + + if v, ok := elasticsearchMap["service_info"]; ok { + for _, item := range v.([]interface{}) { + serviceInfoMap := item.(map[string]interface{}) + ipPortPair := dlcv20210125.IpPortPair{} + if v, ok := serviceInfoMap["ip"].(string); ok { + ipPortPair.Ip = helper.String(v) + } + + if v, ok := serviceInfoMap["port"].(int); ok { + ipPortPair.Port = helper.IntInt64(v) + } + + elasticsearchInfo.ServiceInfo = append(elasticsearchInfo.ServiceInfo, &ipPortPair) + } + } + + datasourceConnectionConfig.Elasticsearch = &elasticsearchInfo + } + + if tDSQLPostgreSqlMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["tdsql_postgre_sql"]); ok { + dataSourceInfo4 := dlcv20210125.DataSourceInfo{} + if v, ok := tDSQLPostgreSqlMap["instance_id"].(string); ok { + dataSourceInfo4.InstanceId = helper.String(v) + } + + if v, ok := tDSQLPostgreSqlMap["instance_name"].(string); ok { + dataSourceInfo4.InstanceName = helper.String(v) + } + + if v, ok := tDSQLPostgreSqlMap["jdbc_url"].(string); ok { + dataSourceInfo4.JdbcUrl = helper.String(v) + } + + if v, ok := tDSQLPostgreSqlMap["user"].(string); ok { + dataSourceInfo4.User = helper.String(v) + } + + if v, ok := tDSQLPostgreSqlMap["password"].(string); ok { + dataSourceInfo4.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(tDSQLPostgreSqlMap["location"]); ok { + datasourceConnectionLocation10 := 
dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation10.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation10.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation10.SubnetId = helper.String(v) + } + + if v, ok := locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation10.SubnetCidrBlock = helper.String(v) + } + + dataSourceInfo4.Location = &datasourceConnectionLocation10 + } + + if v, ok := tDSQLPostgreSqlMap["db_name"].(string); ok { + dataSourceInfo4.DbName = helper.String(v) + } + + datasourceConnectionConfig.TDSQLPostgreSql = &dataSourceInfo4 + } + + if tCHouseDMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["tc_house_d"]); ok { + tCHouseD := dlcv20210125.TCHouseD{} + if v, ok := tCHouseDMap["instance_id"].(string); ok { + tCHouseD.InstanceId = helper.String(v) + } + + if v, ok := tCHouseDMap["instance_name"].(string); ok { + tCHouseD.InstanceName = helper.String(v) + } + + if v, ok := tCHouseDMap["jdbc_url"].(string); ok { + tCHouseD.JdbcUrl = helper.String(v) + } + + if v, ok := tCHouseDMap["user"].(string); ok { + tCHouseD.User = helper.String(v) + } + + if v, ok := tCHouseDMap["password"].(string); ok { + tCHouseD.Password = helper.String(v) + } + + if locationMap, ok := helper.ConvertInterfacesHeadToMap(tCHouseDMap["location"]); ok { + datasourceConnectionLocation11 := dlcv20210125.DatasourceConnectionLocation{} + if v, ok := locationMap["vpc_id"].(string); ok { + datasourceConnectionLocation11.VpcId = helper.String(v) + } + + if v, ok := locationMap["vpc_cidr_block"].(string); ok { + datasourceConnectionLocation11.VpcCidrBlock = helper.String(v) + } + + if v, ok := locationMap["subnet_id"].(string); ok { + datasourceConnectionLocation11.SubnetId = helper.String(v) + } + + if v, ok := 
locationMap["subnet_cidr_block"].(string); ok { + datasourceConnectionLocation11.SubnetCidrBlock = helper.String(v) + } + + tCHouseD.Location = &datasourceConnectionLocation11 + } + + if v, ok := tCHouseDMap["db_name"].(string); ok { + tCHouseD.DbName = helper.String(v) + } + + if v, ok := tCHouseDMap["access_info"].(string); ok { + tCHouseD.AccessInfo = helper.String(v) + } + + datasourceConnectionConfig.TCHouseD = &tCHouseD + } + + if tccHiveMap, ok := helper.ConvertInterfacesHeadToMap(datasourceConnectionConfigMap["tcc_hive"]); ok { + tccHive := dlcv20210125.TccHive{} + if v, ok := tccHiveMap["instance_id"].(string); ok { + tccHive.InstanceId = helper.String(v) + } + + if v, ok := tccHiveMap["instance_name"].(string); ok { + tccHive.InstanceName = helper.String(v) + } + + if v, ok := tccHiveMap["endpoint_service_id"].(string); ok { + tccHive.EndpointServiceId = helper.String(v) + } + + if v, ok := tccHiveMap["meta_store_url"].(string); ok { + tccHive.MetaStoreUrl = helper.String(v) + } + + if v, ok := tccHiveMap["hive_version"].(string); ok { + tccHive.HiveVersion = helper.String(v) + } + + if tccConnectionMap, ok := helper.ConvertInterfacesHeadToMap(tccHiveMap["tcc_connection"]); ok { + netWork := dlcv20210125.NetWork{} + if v, ok := tccConnectionMap["clb_ip"].(string); ok { + netWork.ClbIp = helper.String(v) + } + + if v, ok := tccConnectionMap["clb_port"].(string); ok { + netWork.ClbPort = helper.String(v) + } + + if v, ok := tccConnectionMap["vpc_id"].(string); ok { + netWork.VpcId = helper.String(v) + } + + if v, ok := tccConnectionMap["vpc_cidr_block"].(string); ok { + netWork.VpcCidrBlock = helper.String(v) + } + + if v, ok := tccConnectionMap["subnet_id"].(string); ok { + netWork.SubnetId = helper.String(v) + } + + if v, ok := tccConnectionMap["subnet_cidr_block"].(string); ok { + netWork.SubnetCidrBlock = helper.String(v) + } + + tccHive.TccConnection = &netWork + } + + if v, ok := tccHiveMap["hms_endpoint_service_id"].(string); ok { + 
tccHive.HmsEndpointServiceId = helper.String(v) + } + + datasourceConnectionConfig.TccHive = &tccHive + } + + request.DatasourceConnectionConfig = &datasourceConnectionConfig + } + + if v, ok := d.GetOk("data_engine_names"); ok { + dataEngineNamesSet := v.(*schema.Set).List() + for i := range dataEngineNamesSet { + dataEngineNames := dataEngineNamesSet[i].(string) + request.DataEngineNames = append(request.DataEngineNames, helper.String(dataEngineNames)) + } + } + + if v, ok := d.GetOkExists("network_connection_type"); ok { + request.NetworkConnectionType = helper.IntInt64(v.(int)) + } + + if v, ok := d.GetOk("network_connection_desc"); ok { + request.NetworkConnectionDesc = helper.String(v.(string)) + } + + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().AssociateDatasourceHouseWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s create dlc datasource house attachment failed, reason:%+v", logId, reqErr) + return reqErr + } + + d.SetId(datasourceConnectionName) + + // wait + waitReq := dlcv20210125.NewDescribeNetworkConnectionsRequest() + waitReq.NetworkConnectionName = &datasourceConnectionName + reqErr = resource.Retry(tccommon.ReadRetryTimeout*2, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeNetworkConnectionsWithContext(ctx, waitReq) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, waitReq.GetAction(), waitReq.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil || 
result.Response.NetworkConnectionSet == nil || len(result.Response.NetworkConnectionSet) == 0 { + return resource.NonRetryableError(fmt.Errorf("Describe dlc datasource house attachment failed, Response is nil.")) + } + + if result.Response.NetworkConnectionSet[0].State != nil && *result.Response.NetworkConnectionSet[0].State == 1 { + return nil + } + + return resource.RetryableError(fmt.Errorf("DLC datasource house attachment is still running...")) + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s create dlc datasource house attachment failed, reason:%+v", logId, reqErr) + return reqErr + } + + return resourceTencentCloudDlcDatasourceHouseAttachmentRead(d, meta) +} + +func resourceTencentCloudDlcDatasourceHouseAttachmentRead(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_datasource_house_attachment.read")() + defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + service = DlcService{client: meta.(tccommon.ProviderMeta).GetAPIV3Conn()} + datasourceConnectionName = d.Id() + ) + + respData, err := service.DescribeDlcDatasourceHouseAttachmentById(ctx, datasourceConnectionName) + if err != nil { + return err + } + + if respData == nil { + log.Printf("[WARN]%s resource `tencentcloud_dlc_datasource_house_attachment` [%s] not found, please check if it has been deleted.\n", logId, d.Id()) + d.SetId("") + return nil + } + + if respData.DatasourceConnectionName != nil { + _ = d.Set("datasource_connection_name", respData.DatasourceConnectionName) + } + + if respData.HouseName != nil { + _ = d.Set("data_engine_names", []string{*respData.HouseName}) + } + + if respData.NetworkConnectionType != nil { + _ = d.Set("network_connection_type", respData.NetworkConnectionType) + } + + if respData.NetworkConnectionDesc != nil { + _ = d.Set("network_connection_desc", 
respData.NetworkConnectionDesc) + } + + return nil +} + +func resourceTencentCloudDlcDatasourceHouseAttachmentUpdate(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_datasource_house_attachment.delete")() + defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + datasourceConnectionName = d.Id() + ) + + if d.HasChange("network_connection_desc") { + request := dlcv20210125.NewUpdateNetworkConnectionRequest() + if v, ok := d.GetOk("network_connection_desc"); ok { + request.NetworkConnectionDesc = helper.String(v.(string)) + } + + request.NetworkConnectionName = &datasourceConnectionName + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UpdateNetworkConnectionWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s update dlc datasource house attachment failed, reason:%+v", logId, reqErr) + return reqErr + } + } + + return resourceTencentCloudDlcDatasourceHouseAttachmentRead(d, meta) +} + +func resourceTencentCloudDlcDatasourceHouseAttachmentDelete(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_datasource_house_attachment.delete")() + defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + request = dlcv20210125.NewUnboundDatasourceHouseRequest() + datasourceConnectionName = d.Id() + ) + + 
request.NetworkConnectionName = &datasourceConnectionName + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UnboundDatasourceHouseWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s delete dlc datasource house attachment failed, reason:%+v", logId, reqErr) + return reqErr + } + + return nil +} diff --git a/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.md b/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.md new file mode 100644 index 0000000000..4b77d4a986 --- /dev/null +++ b/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment.md @@ -0,0 +1,27 @@ +Provides a resource to create a DLC datasource house attachment + +Example Usage + +```hcl +resource "tencentcloud_dlc_datasource_house_attachment" "example" { + datasource_connection_name = "tf-example" + datasource_connection_type = "Mysql" + datasource_connection_config { + mysql { + jdbc_url = "" + user = "" + password = "" + location { + vpc_id = "vpc-khkyabcd" + vpc_cidr_block = "192.168.0.0/16" + subnet_id = "subnet-o7n9eg12" + subnet_cidr_block = "192.168.0.0/24" + } + } + } + + data_engine_names = ["engine_demo"] + network_connection_type = 4 + network_connection_desc = "remark." 
+} +``` diff --git a/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment_test.go b/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment_test.go new file mode 100644 index 0000000000..d29c0b5983 --- /dev/null +++ b/tencentcloud/services/dlc/resource_tc_dlc_datasource_house_attachment_test.go @@ -0,0 +1,79 @@ +package dlc_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + tcacctest "github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/acctest" +) + +func TestAccTencentCloudDlcDatasourceHouseAttachmentResource_basic(t *testing.T) { + t.Parallel() + resource.Test(t, resource.TestCase{ + PreCheck: func() { + tcacctest.AccPreCheck(t) + }, + Providers: tcacctest.AccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDlcDatasourceHouseAttachment, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("tencentcloud_dlc_datasource_house_attachment.example", "id"), + ), + }, + { + Config: testAccDlcDatasourceHouseAttachmentUpdate, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("tencentcloud_dlc_datasource_house_attachment.example", "id"), + ), + }, + }, + }) +} + +const testAccDlcDatasourceHouseAttachment = ` +resource "tencentcloud_dlc_datasource_house_attachment" "example" { + datasource_connection_name = "tf-example" + datasource_connection_type = "Mysql" + datasource_connection_config { + mysql { + jdbc_url = "" + user = "" + password = "" + location { + vpc_id = "vpc-khkyabcd" + vpc_cidr_block = "192.168.0.0/16" + subnet_id = "subnet-o7n9eg12" + subnet_cidr_block = "192.168.0.0/24" + } + } + } + data_engine_names = ["engine_demo"] + network_connection_type = 4 + network_connection_desc = "remark." 
+} +` + +const testAccDlcDatasourceHouseAttachmentUpdate = ` +resource "tencentcloud_dlc_datasource_house_attachment" "example" { + datasource_connection_name = "tf-example" + datasource_connection_type = "Mysql" + datasource_connection_config { + mysql { + jdbc_url = "" + user = "" + password = "" + location { + vpc_id = "vpc-khkyabcd" + vpc_cidr_block = "192.168.0.0/16" + subnet_id = "subnet-o7n9eg12" + subnet_cidr_block = "192.168.0.0/24" + } + } + } + data_engine_names = ["engine_demo"] + network_connection_type = 4 + network_connection_desc = "remark update." +} +` diff --git a/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.go b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.go index ee78f4ecdb..d194e741db 100644 --- a/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.go +++ b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.go @@ -83,6 +83,7 @@ func ResourceTencentCloudDlcStandardEngineResourceGroup() *schema.Resource { "static_config_pairs": { Type: schema.TypeList, Optional: true, + Computed: true, Description: "Static parameters of the resource group, which require restarting the resource group to take effect.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -103,6 +104,7 @@ func ResourceTencentCloudDlcStandardEngineResourceGroup() *schema.Resource { "dynamic_config_pairs": { Type: schema.TypeList, Optional: true, + Computed: true, Description: "Dynamic parameters of the resource group, effective in the next task.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -207,6 +209,13 @@ func ResourceTencentCloudDlcStandardEngineResourceGroup() *schema.Resource { Description: "Only the SQL resource group resource limit, only used for the express module.", }, + "running_state": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + Description: "The state of the resource group. 
true: launch standard engine resource group; false: pause standard engine resource group. Default is true.", + }, + // computed "engine_resource_group_id": { Type: schema.TypeString, @@ -426,6 +435,78 @@ func resourceTencentCloudDlcStandardEngineResourceGroupCreate(d *schema.Resource return waitErr } + if v, ok := d.GetOkExists("running_state"); ok { + // pause resource group + if !v.(bool) { + request := dlcv20210125.NewPauseStandardEngineResourceGroupsRequest() + request.EngineResourceGroupNames = helper.Strings([]string{engineResourceGroupName}) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().PauseStandardEngineResourceGroupsWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Pause dlc standard engine resource group failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s pause dlc standard engine resource group failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), 
result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 3 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + } + } + return resourceTencentCloudDlcStandardEngineResourceGroupRead(d, meta) } @@ -547,6 +628,14 @@ func resourceTencentCloudDlcStandardEngineResourceGroupRead(d *schema.ResourceDa _ = d.Set("engine_resource_group_id", respData.EngineResourceGroupId) } + if respData.ResourceGroupState != nil { + if *respData.ResourceGroupState == 2 { + _ = d.Set("running_state", true) + } else if *respData.ResourceGroupState == 3 { + _ = d.Set("running_state", false) + } + } + return nil } @@ -870,6 +959,146 @@ func resourceTencentCloudDlcStandardEngineResourceGroupUpdate(d *schema.Resource } } + if d.HasChange("running_state") { + if v, ok := d.GetOkExists("running_state"); ok { + if v.(bool) { + request := dlcv20210125.NewLaunchStandardEngineResourceGroupsRequest() + request.EngineResourceGroupNames = helper.Strings([]string{engineResourceGroupName}) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError 
{ + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().LaunchStandardEngineResourceGroupsWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Launch dlc standard engine resource group failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s launch dlc standard engine resource group failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState 
+ if state != nil { + if *state == 2 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + } else { + request := dlcv20210125.NewPauseStandardEngineResourceGroupsRequest() + request.EngineResourceGroupNames = helper.Strings([]string{engineResourceGroupName}) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().PauseStandardEngineResourceGroupsWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Pause dlc standard engine resource group failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s pause dlc standard engine resource group failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), 
request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 3 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + } + } + } + return resourceTencentCloudDlcStandardEngineResourceGroupRead(d, meta) } diff --git a/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.md b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.md index 90bfe7f26c..57601a5510 100644 --- a/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.md +++ b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group.md @@ -4,6 +4,8 @@ Provides a resource to create a DLC standard engine resource group ~> **NOTE:** Field `auto_pause_time` is meaningful only when the values of fields `auto_launch` and `auto_pause` are 0. +~> **NOTE:** If you need to set the `static_config_pairs` or `dynamic_config_pairs`, it is recommended to use resource `tencentcloud_dlc_standard_engine_resource_group_config_info`. 
+ Example Usage Only SQL analysis resource group @@ -28,6 +30,7 @@ resource "tencentcloud_dlc_standard_engine_resource_group" "example" { resource_group_scene = "SparkSQL" spark_spec_mode = "fast" spark_size = 16 + running_state = true } ``` @@ -47,5 +50,6 @@ resource "tencentcloud_dlc_standard_engine_resource_group" "example" { image_type = "built-in" image_version = "97319759-0b80-48b4-a7a7-436d9ef3b666" image_name = "pytorch-v2.5.1" + running_state = false } ``` diff --git a/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.go b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.go new file mode 100644 index 0000000000..92dbd6ff4c --- /dev/null +++ b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.go @@ -0,0 +1,816 @@ +package dlc + +import ( + "context" + "fmt" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + dlcv20210125 "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dlc/v20210125" + + tccommon "github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/common" + "github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/internal/helper" +) + +func ResourceTencentCloudDlcStandardEngineResourceGroupConfigInfo() *schema.Resource { + return &schema.Resource{ + Create: resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoCreate, + Read: resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoRead, + Update: resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoUpdate, + Delete: resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + Schema: map[string]*schema.Schema{ + "engine_resource_group_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Standard engine resource group name.", + 
}, + + "static_conf_context": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Static config context.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "params": { + Type: schema.TypeSet, + Optional: true, + Description: "Collection of bound working groups.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "config_item": { + Type: schema.TypeString, + Required: true, + Description: "Configuration item.", + }, + "config_value": { + Type: schema.TypeString, + Required: true, + Description: "Configuration value.", + }, + }, + }, + }, + }, + }, + }, + + "dynamic_conf_context": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Dynamic config context.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "params": { + Type: schema.TypeSet, + Optional: true, + Description: "Collection of bound working groups.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "config_item": { + Type: schema.TypeString, + Required: true, + Description: "Configuration item.", + }, + "config_value": { + Type: schema.TypeString, + Required: true, + Description: "Configuration value.", + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +func resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoCreate(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_standard_engine_resource_group_config_info.create")() + defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + request = dlcv20210125.NewUpdateStandardEngineResourceGroupConfigInfoRequest() + engineResourceGroupName string + ) + + if v, ok := d.GetOk("engine_resource_group_name"); ok { + request.EngineResourceGroupName = helper.String(v.(string)) + engineResourceGroupName = v.(string) + } + + confContextList := 
[]*dlcv20210125.UpdateConfContext{} + if v, ok := d.GetOk("static_conf_context"); ok { + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("StaticConfigType") + for _, item := range v.([]interface{}) { + dMap := item.(map[string]interface{}) + if v, ok := dMap["params"]; ok { + for _, item := range v.(*schema.Set).List() { + param := dlcv20210125.Param{} + dMap := item.(map[string]interface{}) + if v, ok := dMap["config_item"].(string); ok && v != "" { + param.ConfigItem = helper.String(v) + } + + if v, ok := dMap["config_value"].(string); ok && v != "" { + param.ConfigValue = helper.String(v) + } + + param.Operate = helper.String("ADD") + confContext.Params = append(confContext.Params, ¶m) + } + } + } + + confContextList = append(confContextList, &confContext) + } + + if v, ok := d.GetOk("dynamic_conf_context"); ok { + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("DynamicConfigType") + for _, item := range v.([]interface{}) { + dMap := item.(map[string]interface{}) + if v, ok := dMap["params"]; ok { + for _, item := range v.(*schema.Set).List() { + param := dlcv20210125.Param{} + dMap := item.(map[string]interface{}) + if v, ok := dMap["config_item"].(string); ok && v != "" { + param.ConfigItem = helper.String(v) + } + + if v, ok := dMap["config_value"].(string); ok && v != "" { + param.ConfigValue = helper.String(v) + } + + param.Operate = helper.String("ADD") + confContext.Params = append(confContext.Params, ¶m) + } + } + } + + confContextList = append(confContextList, &confContext) + } + + request.UpdateConfContext = confContextList + request.IsEffectiveNow = helper.Int64(0) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UpdateStandardEngineResourceGroupConfigInfoWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s 
api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Create dlc standard engine resource group config info failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s create dlc standard engine resource group config info failed, reason:%+v", logId, reqErr) + return reqErr + } + + d.SetId(engineResourceGroupName) + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 2 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + 
return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + + return resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoRead(d, meta) +} + +func resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoRead(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_standard_engine_resource_group_config_info.read")() + defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + service = DlcService{client: meta.(tccommon.ProviderMeta).GetAPIV3Conn()} + engineResourceGroupName = d.Id() + ) + + respData, err := service.DescribeDlcStandardEngineResourceGroupConfigInfoById(ctx, engineResourceGroupName) + if err != nil { + return err + } + + if respData == nil { + log.Printf("[WARN]%s resource `tencentcloud_dlc_standard_engine_resource_group_config_info` [%s] not found, please check if it has been deleted.\n", logId, d.Id()) + d.SetId("") + return nil + } + + _ = d.Set("engine_resource_group_name", engineResourceGroupName) + + if respData.StaticConfigPairs != nil { + dMap := make(map[string]interface{}, 0) + tmpList := make([]map[string]interface{}, 0, 1) + for _, configPair := range respData.StaticConfigPairs { + configPairMap := map[string]interface{}{} + if configPair.ConfigItem != nil { + configPairMap["config_item"] = configPair.ConfigItem + } + + if configPair.ConfigValue != nil { + configPairMap["config_value"] = configPair.ConfigValue + } + + tmpList = append(tmpList, configPairMap) + } + + dMap["params"] = tmpList + _ = d.Set("static_conf_context", []interface{}{dMap}) + } + + if respData.DynamicConfigPairs != nil { + dMap := make(map[string]interface{}, 0) + tmpList := 
make([]map[string]interface{}, 0, 1) + for _, configPair := range respData.DynamicConfigPairs { + configPairMap := map[string]interface{}{} + if configPair.ConfigItem != nil { + configPairMap["config_item"] = configPair.ConfigItem + } + + if configPair.ConfigValue != nil { + configPairMap["config_value"] = configPair.ConfigValue + } + + tmpList = append(tmpList, configPairMap) + } + + dMap["params"] = tmpList + _ = d.Set("dynamic_conf_context", []interface{}{dMap}) + } + + return nil +} + +func resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoUpdate(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_standard_engine_resource_group_config_info.update")() + defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + engineResourceGroupName = d.Id() + ) + + if d.HasChange("static_conf_context") { + oldInterface, newInterface := d.GetChange("static_conf_context") + olds := oldInterface.(*schema.Set) + news := newInterface.(*schema.Set) + remove := olds.Difference(news).List() + add := news.Difference(olds).List() + + if len(remove) > 0 { + request := dlcv20210125.NewUpdateStandardEngineResourceGroupConfigInfoRequest() + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("StaticConfigType") + for _, item := range remove { + dMap := item.(map[string]interface{}) + param := dlcv20210125.Param{} + if v, ok := dMap["config_item"].(string); ok && v != "" { + param.ConfigItem = helper.String(v) + } + + if v, ok := dMap["config_value"].(string); ok && v != "" { + param.ConfigValue = helper.String(v) + } + + param.Operate = helper.String("DELETE") + confContext.Params = append(confContext.Params, ¶m) + } + + request.UpdateConfContext = append(request.UpdateConfContext, &confContext) + request.EngineResourceGroupName = &engineResourceGroupName 
+ request.IsEffectiveNow = helper.Int64(0) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UpdateStandardEngineResourceGroupConfigInfoWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Update dlc standard engine resource group config info failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s update dlc standard engine resource group config info failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return 
resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 2 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + } + + if len(add) > 0 { + request := dlcv20210125.NewUpdateStandardEngineResourceGroupConfigInfoRequest() + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("StaticConfigType") + for _, item := range add { + dMap := item.(map[string]interface{}) + param := dlcv20210125.Param{} + if v, ok := dMap["config_item"].(string); ok && v != "" { + param.ConfigItem = helper.String(v) + } + + if v, ok := dMap["config_value"].(string); ok && v != "" { + param.ConfigValue = helper.String(v) + } + + param.Operate = helper.String("ADD") + confContext.Params = append(confContext.Params, ¶m) + } + + request.UpdateConfContext = append(request.UpdateConfContext, &confContext) + request.EngineResourceGroupName = &engineResourceGroupName + request.IsEffectiveNow = helper.Int64(0) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UpdateStandardEngineResourceGroupConfigInfoWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Update dlc standard engine resource group config info failed, 
Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s update dlc standard engine resource group config info failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 2 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + } + } + + if d.HasChange("dynamic_conf_context") { + oldInterface, 
newInterface := d.GetChange("dynamic_conf_context") + olds := oldInterface.(*schema.Set) + news := newInterface.(*schema.Set) + remove := olds.Difference(news).List() + add := news.Difference(olds).List() + + if len(remove) > 0 { + request := dlcv20210125.NewUpdateStandardEngineResourceGroupConfigInfoRequest() + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("DynamicConfigType") + for _, item := range remove { + dMap := item.(map[string]interface{}) + param := dlcv20210125.Param{} + if v, ok := dMap["config_item"].(string); ok && v != "" { + param.ConfigItem = helper.String(v) + } + + if v, ok := dMap["config_value"].(string); ok && v != "" { + param.ConfigValue = helper.String(v) + } + + param.Operate = helper.String("DELETE") + confContext.Params = append(confContext.Params, ¶m) + } + + request.UpdateConfContext = append(request.UpdateConfContext, &confContext) + request.EngineResourceGroupName = &engineResourceGroupName + request.IsEffectiveNow = helper.Int64(0) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UpdateStandardEngineResourceGroupConfigInfoWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Update dlc standard engine resource group config info failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s update dlc standard engine resource group config info failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := 
meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 2 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + } + + if len(add) > 0 { + request := dlcv20210125.NewUpdateStandardEngineResourceGroupConfigInfoRequest() + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("DynamicConfigType") + for _, item := range add { + dMap := item.(map[string]interface{}) + param := dlcv20210125.Param{} + if v, ok := dMap["config_item"].(string); ok && v != "" { + 
param.ConfigItem = helper.String(v) + } + + if v, ok := dMap["config_value"].(string); ok && v != "" { + param.ConfigValue = helper.String(v) + } + + param.Operate = helper.String("ADD") + confContext.Params = append(confContext.Params, ¶m) + } + + request.UpdateConfContext = append(request.UpdateConfContext, &confContext) + request.EngineResourceGroupName = &engineResourceGroupName + request.IsEffectiveNow = helper.Int64(0) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UpdateStandardEngineResourceGroupConfigInfoWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Update dlc standard engine resource group config info failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s update dlc standard engine resource group config info failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return 
resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 2 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + } + } + + return resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoRead(d, meta) +} + +func resourceTencentCloudDlcStandardEngineResourceGroupConfigInfoDelete(d *schema.ResourceData, meta interface{}) error { + defer tccommon.LogElapsed("resource.tencentcloud_dlc_standard_engine_resource_group_config_info.delete")() + defer tccommon.InconsistentCheck(d, meta)() + + var ( + logId = tccommon.GetLogId(tccommon.ContextNil) + ctx = tccommon.NewResourceLifeCycleHandleFuncContext(context.Background(), logId, d, meta) + service = DlcService{client: meta.(tccommon.ProviderMeta).GetAPIV3Conn()} + request = dlcv20210125.NewUpdateStandardEngineResourceGroupConfigInfoRequest() + engineResourceGroupName = d.Id() + ) + + // get all conf context + respData, err := service.DescribeDlcStandardEngineResourceGroupConfigInfoById(ctx, engineResourceGroupName) + if err != nil { + return err + } + + if respData == nil { + return nil + } + + // delete all + if respData.StaticConfigPairs != nil && 
len(respData.StaticConfigPairs) > 0 { + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("StaticConfigType") + for _, item := range respData.StaticConfigPairs { + param := dlcv20210125.Param{} + if item.ConfigItem != nil { + param.ConfigItem = item.ConfigItem + } + + if item.ConfigValue != nil { + param.ConfigValue = item.ConfigValue + } + + param.Operate = helper.String("DELETE") + confContext.Params = append(confContext.Params, ¶m) + } + + request.UpdateConfContext = append(request.UpdateConfContext, &confContext) + } + + if respData.DynamicConfigPairs != nil && len(respData.DynamicConfigPairs) > 0 { + confContext := dlcv20210125.UpdateConfContext{} + confContext.ConfigType = helper.String("DynamicConfigType") + for _, item := range respData.DynamicConfigPairs { + param := dlcv20210125.Param{} + if item.ConfigItem != nil { + param.ConfigItem = item.ConfigItem + } + + if item.ConfigValue != nil { + param.ConfigValue = item.ConfigValue + } + + param.Operate = helper.String("DELETE") + confContext.Params = append(confContext.Params, ¶m) + } + + request.UpdateConfContext = append(request.UpdateConfContext, &confContext) + } + + request.EngineResourceGroupName = &engineResourceGroupName + request.IsEffectiveNow = helper.Int64(0) + reqErr := resource.Retry(tccommon.WriteRetryTimeout, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().UpdateStandardEngineResourceGroupConfigInfoWithContext(ctx, request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Delete dlc standard engine resource group config info failed, Response is nil.")) + } + + return nil + }) + + if reqErr != nil { + log.Printf("[CRITAL]%s delete dlc 
standard engine resource group config info failed, reason:%+v", logId, reqErr) + return reqErr + } + + // wait + waitErr := resource.Retry(tccommon.WriteRetryTimeout*4, func() *resource.RetryError { + result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseDlcClient().DescribeStandardEngineResourceGroupsWithContext(ctx, &dlcv20210125.DescribeStandardEngineResourceGroupsRequest{ + Filters: []*dlcv20210125.Filter{ + { + Name: helper.String("engine-resource-group-name-unique"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + }, + }) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource groups failed, Response is nil.")) + } + + if result.Response.UserEngineResourceGroupInfos == nil || len(result.Response.UserEngineResourceGroupInfos) == 0 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is nil.")) + } + + if len(result.Response.UserEngineResourceGroupInfos) != 1 { + return resource.NonRetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not 1.")) + } + + state := result.Response.UserEngineResourceGroupInfos[0].ResourceGroupState + if state != nil { + if *state == 2 { + return nil + } + } else { + return resource.NonRetryableError(fmt.Errorf("ResourceGroupState is nil.")) + } + + return resource.RetryableError(fmt.Errorf("UserEngineResourceGroupInfos is not ready, state:%d", *state)) + }) + + if waitErr != nil { + log.Printf("[CRITAL]%s wait for dlc standard engine resource group failed, reason:%+v", logId, waitErr) + return waitErr + } + + return nil +} diff --git a/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.md 
b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.md new file mode 100644 index 0000000000..a55c09a17e --- /dev/null +++ b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info.md @@ -0,0 +1,39 @@ +Provides a resource to create a DLC standard engine resource group config info + +~> **NOTE:** This resource must be exclusive to one engine resource group, do not declare additional config resources of this conf context elsewhere. + +~> **NOTE:** If you use the `tencentcloud_dlc_standard_engine_resource_group_config_info` resource, please do not set `static_config_pairs` or `dynamic_config_pairs` in resource `tencentcloud_dlc_standard_engine_resource_group` simultaneously. + +Example Usage + +```hcl +resource "tencentcloud_dlc_standard_engine_resource_group_config_info" "example" { + engine_resource_group_name = "tf-example" + static_conf_context { + params { + config_item = "item1" + config_value = "value1" + } + + params { + config_item = "item2" + config_value = "value2" + } + } + + dynamic_conf_context { + params { + config_item = "item3" + config_value = "value3" + } + } +} +``` + +Import + +DLC standard engine resource group config info can be imported using the id, e.g.
+ +``` +terraform import tencentcloud_dlc_standard_engine_resource_group_config_info.example tf-example +``` diff --git a/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info_test.go b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info_test.go new file mode 100644 index 0000000000..c69023be86 --- /dev/null +++ b/tencentcloud/services/dlc/resource_tc_dlc_standard_engine_resource_group_config_info_test.go @@ -0,0 +1,87 @@ +package dlc_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + tcacctest "github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/acctest" +) + +func TestAccTencentCloudDlcStandardEngineResourceGroupConfigInfoResource_basic(t *testing.T) { + t.Parallel() + resource.Test(t, resource.TestCase{ + PreCheck: func() { + tcacctest.AccPreCheck(t) + }, + Providers: tcacctest.AccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDlcStandardEngineResourceGroupConfigInfo, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("tencentcloud_dlc_standard_engine_resource_group_config_info.example", "id"), + ), + }, + { + Config: testAccDlcStandardEngineResourceGroupConfigInfoUpdate, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("tencentcloud_dlc_standard_engine_resource_group_config_info.example", "id"), + ), + }, + { + ResourceName: "tencentcloud_dlc_standard_engine_resource_group_config_info.example", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +const testAccDlcStandardEngineResourceGroupConfigInfo = ` +resource "tencentcloud_dlc_standard_engine_resource_group_config_info" "example" { + engine_resource_group_name = "tf-example" + static_conf_context { + params { + config_item = "item1" + config_value = "value1" + } + + params { + config_item = "item2" + config_value = "value2" + } + } + + dynamic_conf_context { + params { + config_item = "item3" 
+ config_value = "value3" + } + } +} + +` + +const testAccDlcStandardEngineResourceGroupConfigInfoUpdate = ` +resource "tencentcloud_dlc_standard_engine_resource_group_config_info" "example" { + engine_resource_group_name = "tf-example" + static_conf_context { + params { + config_item = "item1" + config_value = "value1" + } + } + + dynamic_conf_context { + params { + config_item = "item3" + config_value = "value3" + } + + params { + config_item = "item2" + config_value = "value2" + } + } +} +` diff --git a/tencentcloud/services/dlc/service_tencentcloud_dlc.go b/tencentcloud/services/dlc/service_tencentcloud_dlc.go index ca1ef3e5de..1b30c3a3d3 100644 --- a/tencentcloud/services/dlc/service_tencentcloud_dlc.go +++ b/tencentcloud/services/dlc/service_tencentcloud_dlc.go @@ -1435,3 +1435,90 @@ func (me *DlcService) DescribeDlcAttachDataMaskPolicyById(ctx context.Context, c ret = response.Response.Table return } + +func (me *DlcService) DescribeDlcStandardEngineResourceGroupConfigInfoById(ctx context.Context, engineResourceGroupName string) (ret *dlc.StandardEngineResourceGroupConfigInfo, errRet error) { + logId := tccommon.GetLogId(ctx) + + request := dlc.NewDescribeStandardEngineResourceGroupConfigInfoRequest() + response := dlc.NewDescribeStandardEngineResourceGroupConfigInfoResponse() + request.Filters = []*dlc.Filter{ + { + Name: helper.String("engine-resource-group-name"), + Values: helper.Strings([]string{engineResourceGroupName}), + }, + } + + defer func() { + if errRet != nil { + log.Printf("[CRITAL]%s api[%s] fail, request body [%s], reason[%s]\n", logId, request.GetAction(), request.ToJsonString(), errRet.Error()) + } + }() + + errRet = resource.Retry(tccommon.ReadRetryTimeout, func() *resource.RetryError { + ratelimit.Check(request.GetAction()) + result, e := me.client.UseDlcClient().DescribeStandardEngineResourceGroupConfigInfo(request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], 
response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil || result.Response.StandardEngineResourceGroupConfigInfos == nil { + return resource.NonRetryableError(fmt.Errorf("Describe dlc standard engine resource group config info failed, Response is nil.")) + } + + response = result + return nil + }) + + if errRet != nil { + log.Printf("[CRITAL]%s describe dlc standard engine resource group config info failed, reason:%+v", logId, errRet) + return + } + + if len(response.Response.StandardEngineResourceGroupConfigInfos) < 1 { + return + } + + ret = response.Response.StandardEngineResourceGroupConfigInfos[0] + return +} + +func (me *DlcService) DescribeDlcDatasourceHouseAttachmentById(ctx context.Context, datasourceConnectionName string) (ret *dlc.NetworkConnection, errRet error) { + logId := tccommon.GetLogId(ctx) + + request := dlc.NewDescribeNetworkConnectionsRequest() + response := dlc.NewDescribeNetworkConnectionsResponse() + request.NetworkConnectionName = helper.String(datasourceConnectionName) + + defer func() { + if errRet != nil { + log.Printf("[CRITAL]%s api[%s] fail, request body [%s], reason[%s]\n", logId, request.GetAction(), request.ToJsonString(), errRet.Error()) + } + }() + + errRet = resource.Retry(tccommon.ReadRetryTimeout, func() *resource.RetryError { + ratelimit.Check(request.GetAction()) + result, e := me.client.UseDlcClient().DescribeNetworkConnections(request) + if e != nil { + return tccommon.RetryError(e) + } else { + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), result.ToJsonString()) + } + + if result == nil || result.Response == nil || result.Response.NetworkConnectionSet == nil || len(result.Response.NetworkConnectionSet) < 1 { + return resource.NonRetryableError(fmt.Errorf("Describe dlc network connections failed, Response is nil.")) + } + + response = result + 
return nil + }) + + if errRet != nil { + log.Printf("[CRITAL]%s describe dlc network connections failed, reason:%+v", logId, errRet) + return + } + + ret = response.Response.NetworkConnectionSet[0] + return +} diff --git a/website/docs/r/dlc_datasource_house_attachment.html.markdown b/website/docs/r/dlc_datasource_house_attachment.html.markdown new file mode 100644 index 0000000000..23fd23d560 --- /dev/null +++ b/website/docs/r/dlc_datasource_house_attachment.html.markdown @@ -0,0 +1,278 @@ +--- +subcategory: "Data Lake Compute(DLC)" +layout: "tencentcloud" +page_title: "TencentCloud: tencentcloud_dlc_datasource_house_attachment" +sidebar_current: "docs-tencentcloud-resource-dlc_datasource_house_attachment" +description: |- + Provides a resource to create a DLC datasource house attachment +--- + +# tencentcloud_dlc_datasource_house_attachment + +Provides a resource to create a DLC datasource house attachment + +## Example Usage + +```hcl +resource "tencentcloud_dlc_datasource_house_attachment" "example" { + datasource_connection_name = "tf-example" + datasource_connection_type = "Mysql" + datasource_connection_config { + mysql { + jdbc_url = "" + user = "" + password = "" + location { + vpc_id = "vpc-khkyabcd" + vpc_cidr_block = "192.168.0.0/16" + subnet_id = "subnet-o7n9eg12" + subnet_cidr_block = "192.168.0.0/24" + } + } + } + + data_engine_names = ["engine_demo"] + network_connection_type = 4 + network_connection_desc = "remark." +} +``` + +## Argument Reference + +The following arguments are supported: + +* `data_engine_names` - (Required, Set: [`String`], ForceNew) Engine name, only one engine can be bound. +* `datasource_connection_config` - (Required, List, ForceNew) Data source network configuration. +* `datasource_connection_name` - (Required, String, ForceNew) Data source connection name. +* `datasource_connection_type` - (Required, String, ForceNew) Data source type.
Allow value: Mysql, HiveCos, HiveHdfs, HiveCHdfs, Kafka, OtherDatasourceConnection, PostgreSql, SqlServer, ClickHouse, Elasticsearch, TDSQLPostgreSql, TCHouseD, TccHive. +* `network_connection_type` - (Required, Int, ForceNew) Network type, 2-cross-source type, 4-enhanced type. +* `network_connection_desc` - (Optional, String) Network configuration description. + +The `click_house` object of `datasource_connection_config` supports the following: + +* `db_name` - (Optional, String, ForceNew) Default database name. +* `instance_id` - (Optional, String, ForceNew) Unique ID of the data source instance. +* `instance_name` - (Optional, String, ForceNew) Name of the data source. +* `jdbc_url` - (Optional, String, ForceNew) JDBC access link for the data source. +* `location` - (Optional, List, ForceNew) VPC and subnet information for the data source. +* `password` - (Optional, String, ForceNew) Data source access password, requires base64 encoding. +* `user` - (Optional, String, ForceNew) Username for accessing the data source. + +The `datasource_connection_config` object supports the following: + +* `click_house` - (Optional, List, ForceNew) Properties of ClickHouse data source connection. +* `elasticsearch` - (Optional, List, ForceNew) Properties of Elasticsearch data source connection. +* `hive` - (Optional, List, ForceNew) Properties of Hive data source connection. +* `kafka` - (Optional, List, ForceNew) Properties of Kafka data source connection. +* `mysql` - (Optional, List, ForceNew) Properties of MySQL data source connection. +* `other_datasource_connection` - (Optional, List, ForceNew) Properties of other data source connection. +* `postgre_sql` - (Optional, List, ForceNew) Properties of PostgreSQL data source connection. +* `sql_server` - (Optional, List, ForceNew) Properties of SQLServer data source connection. +* `tc_house_d` - (Optional, List, ForceNew) Properties of Doris data source connection. 
+* `tcc_hive` - (Optional, List, ForceNew) TccHive data catalog connection information. +* `tdsql_postgre_sql` - (Optional, List, ForceNew) Properties of TDSQL-PostgreSQL data source connection. + +The `elasticsearch` object of `datasource_connection_config` supports the following: + +* `db_name` - (Optional, String, ForceNew) Default database name. +* `instance_id` - (Optional, String, ForceNew) Data source ID. +* `instance_name` - (Optional, String, ForceNew) Data source name. +* `location` - (Optional, List, ForceNew) VPC and subnet information for the data source. +* `password` - (Optional, String, ForceNew) Password, requires base64 encoding. +* `service_info` - (Optional, List, ForceNew) IP and port information for accessing Elasticsearch. +* `user` - (Optional, String, ForceNew) Username. + +The `hive` object of `datasource_connection_config` supports the following: + +* `location` - (Required, List, ForceNew) Private network information where the data source is located. +* `meta_store_url` - (Required, String, ForceNew) Address of Hive metastore. +* `type` - (Required, String, ForceNew) Hive data source type, representing data storage location, COS or HDFS. +* `bucket_url` - (Optional, String, ForceNew) If the type is COS, COS bucket connection needs to be filled in. +* `hdfs_properties` - (Optional, String, ForceNew) JSON string. If the type is HDFS, this field needs to be filled in. +* `high_availability` - (Optional, Bool, ForceNew) If the type is HDFS, high availability needs to be selected. +* `hive_version` - (Optional, String, ForceNew) Version number of Hive component in EMR cluster. +* `instance_id` - (Optional, String, ForceNew) EMR cluster ID. +* `instance_name` - (Optional, String, ForceNew) EMR cluster name. +* `kerberos_enable` - (Optional, Bool, ForceNew) Whether to enable Kerberos. +* `kerberos_info` - (Optional, List, ForceNew) Kerberos details. +* `mysql` - (Optional, List, ForceNew) Metadata database information for Hive. 
+* `user` - (Optional, String, ForceNew) If the type is HDFS, a username is required. + +The `kafka` object of `datasource_connection_config` supports the following: + +* `instance_id` - (Required, String, ForceNew) Kafka instance ID. +* `location` - (Required, List, ForceNew) Network information for Kafka data source. + +The `kerberos_info` object of `hive` supports the following: + +* `key_tab` - (Optional, String, ForceNew) KeyTab file value. +* `krb5_conf` - (Optional, String, ForceNew) Krb5Conf file value. +* `service_principal` - (Optional, String, ForceNew) Service principal. + +The `location` object of `click_house` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `elasticsearch` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `hive` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. 
+ +The `location` object of `kafka` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `mysql` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `other_datasource_connection` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `postgre_sql` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `sql_server` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. 
+* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `tc_house_d` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `location` object of `tdsql_postgre_sql` supports the following: + +* `subnet_cidr_block` - (Required, String, ForceNew) Subnet IPv4 CIDR. +* `subnet_id` - (Required, String, ForceNew) Subnet instance ID where the data connection is located, such as 'subnet-bthucmmy'. +* `vpc_cidr_block` - (Required, String, ForceNew) VPC IPv4 CIDR. +* `vpc_id` - (Required, String, ForceNew) VPC instance ID where the data connection is located, such as 'vpc-azd4dt1c'. + +The `mysql` object of `datasource_connection_config` supports the following: + +* `jdbc_url` - (Required, String, ForceNew) JDBC URL for connecting to MySQL. +* `location` - (Required, List, ForceNew) Network information for MySQL data source. +* `password` - (Required, String, ForceNew) MySQL password. +* `user` - (Required, String, ForceNew) Username. +* `db_name` - (Optional, String, ForceNew) Database name. +* `instance_id` - (Optional, String, ForceNew) Database instance ID, consistent with the database side. +* `instance_name` - (Optional, String, ForceNew) Database instance name, consistent with the database side. 
+ +The `mysql` object of `hive` supports the following: + +* `jdbc_url` - (Required, String, ForceNew) JDBC URL for connecting to MySQL. +* `location` - (Required, List, ForceNew) Network information for MySQL data source. +* `password` - (Required, String, ForceNew) MySQL password. +* `user` - (Required, String, ForceNew) Username. +* `db_name` - (Optional, String, ForceNew) Database name. +* `instance_id` - (Optional, String, ForceNew) Database instance ID, consistent with the database side. +* `instance_name` - (Optional, String, ForceNew) Database instance name, consistent with the database side. + +The `other_datasource_connection` object of `datasource_connection_config` supports the following: + +* `location` - (Required, List, ForceNew) Network parameters. + +The `postgre_sql` object of `datasource_connection_config` supports the following: + +* `db_name` - (Optional, String, ForceNew) Default database name. +* `instance_id` - (Optional, String, ForceNew) Unique ID of the data source instance. +* `instance_name` - (Optional, String, ForceNew) Name of the data source. +* `jdbc_url` - (Optional, String, ForceNew) JDBC access link for the data source. +* `location` - (Optional, List, ForceNew) VPC and subnet information for the data source. +* `password` - (Optional, String, ForceNew) Data source access password, requires base64 encoding. +* `user` - (Optional, String, ForceNew) Username for accessing the data source. + +The `service_info` object of `elasticsearch` supports the following: + +* `ip` - (Optional, String, ForceNew) IP information. +* `port` - (Optional, Int, ForceNew) Port information. + +The `sql_server` object of `datasource_connection_config` supports the following: + +* `db_name` - (Optional, String, ForceNew) Default database name. +* `instance_id` - (Optional, String, ForceNew) Unique ID of the data source instance. +* `instance_name` - (Optional, String, ForceNew) Name of the data source. 
+* `jdbc_url` - (Optional, String, ForceNew) JDBC access link for the data source. +* `location` - (Optional, List, ForceNew) VPC and subnet information for the data source. +* `password` - (Optional, String, ForceNew) Data source access password, requires base64 encoding. +* `user` - (Optional, String, ForceNew) Username for accessing the data source. + +The `tc_house_d` object of `datasource_connection_config` supports the following: + +* `access_info` - (Optional, String, ForceNew) Access information. +* `db_name` - (Optional, String, ForceNew) Default database name. +* `instance_id` - (Optional, String, ForceNew) Unique ID of the data source instance. +* `instance_name` - (Optional, String, ForceNew) Data source name. +* `jdbc_url` - (Optional, String, ForceNew) JDBC of the data source. +* `location` - (Optional, List, ForceNew) VPC and subnet information for the data source. +* `password` - (Optional, String, ForceNew) Data source access password, requires base64 encoding. +* `user` - (Optional, String, ForceNew) User for accessing the data source. + +The `tcc_connection` object of `tcc_hive` supports the following: + +* `clb_ip` - (Optional, String, ForceNew) Service CLB IP. +* `clb_port` - (Optional, String, ForceNew) Service CLB port. +* `subnet_cidr_block` - (Optional, String, ForceNew) Subnet CIDR. +* `subnet_id` - (Optional, String, ForceNew) Subnet instance ID. +* `vpc_cidr_block` - (Optional, String, ForceNew) VPC CIDR. +* `vpc_id` - (Optional, String, ForceNew) VPC instance ID. + +The `tcc_hive` object of `datasource_connection_config` supports the following: + +* `endpoint_service_id` - (Optional, String, ForceNew) Endpoint service ID. +* `hive_version` - (Optional, String, ForceNew) Hive version. +* `hms_endpoint_service_id` - (Optional, String, ForceNew) HMS endpoint service ID. +* `instance_id` - (Optional, String, ForceNew) Instance ID. +* `instance_name` - (Optional, String, ForceNew) Instance name. 
+* `meta_store_url` - (Optional, String, ForceNew) Thrift connection address. +* `tcc_connection` - (Optional, List, ForceNew) Network information. + +The `tdsql_postgre_sql` object of `datasource_connection_config` supports the following: + +* `db_name` - (Optional, String, ForceNew) Default database name. +* `instance_id` - (Optional, String, ForceNew) Unique ID of the data source instance. +* `instance_name` - (Optional, String, ForceNew) Name of the data source. +* `jdbc_url` - (Optional, String, ForceNew) JDBC access link for the data source. +* `location` - (Optional, List, ForceNew) VPC and subnet information for the data source. +* `password` - (Optional, String, ForceNew) Data source access password, requires base64 encoding. +* `user` - (Optional, String, ForceNew) Username for accessing the data source. + +## Attributes Reference + +In addition to all arguments above, the following attributes are exported: + +* `id` - ID of the resource. + + + diff --git a/website/docs/r/dlc_standard_engine_resource_group.html.markdown b/website/docs/r/dlc_standard_engine_resource_group.html.markdown index a645ccdca7..e6ecaad4ad 100644 --- a/website/docs/r/dlc_standard_engine_resource_group.html.markdown +++ b/website/docs/r/dlc_standard_engine_resource_group.html.markdown @@ -15,6 +15,8 @@ Provides a resource to create a DLC standard engine resource group ~> **NOTE:** Field `auto_pause_time` is meaningful only when the values of fields `auto_launch` and `auto_pause` are 0. +~> **NOTE:** If you need to set the `static_config_pairs` or `dynamic_config_pairs`, it is recommended to use resource `tencentcloud_dlc_standard_engine_resource_group_config_info`. 
+ ## Example Usage ### Only SQL analysis resource group @@ -39,6 +41,7 @@ resource "tencentcloud_dlc_standard_engine_resource_group" "example" { resource_group_scene = "SparkSQL" spark_spec_mode = "fast" spark_size = 16 + running_state = true } ``` @@ -58,6 +61,7 @@ resource "tencentcloud_dlc_standard_engine_resource_group" "example" { image_type = "built-in" image_version = "97319759-0b80-48b4-a7a7-436d9ef3b666" image_name = "pytorch-v2.5.1" + running_state = false } ``` @@ -87,6 +91,7 @@ Example value: image-xxx. If using a built-in image (ImageType is built-in), the * `region_name` - (Optional, String) Custom image location. * `registry_id` - (Optional, String) Custom image instance ID. * `resource_group_scene` - (Optional, String) Resource group scenario. +* `running_state` - (Optional, Bool) The state of the resource group. true: launch standard engine resource group; false: pause standard engine resource group. Default is true. * `size` - (Optional, Int) The AI resource group is valid, and the upper limit of available resources in the resource group must be less than the upper limit of engine resources. * `spark_size` - (Optional, Int) Only the SQL resource group resource limit, only used for the express module. * `spark_spec_mode` - (Optional, String) Only SQL resource group resource configuration mode, fast: fast mode, custom: custom mode. 
diff --git a/website/docs/r/dlc_standard_engine_resource_group_config_info.html.markdown b/website/docs/r/dlc_standard_engine_resource_group_config_info.html.markdown new file mode 100644 index 0000000000..c6597ca264 --- /dev/null +++ b/website/docs/r/dlc_standard_engine_resource_group_config_info.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Data Lake Compute(DLC)" +layout: "tencentcloud" +page_title: "TencentCloud: tencentcloud_dlc_standard_engine_resource_group_config_info" +sidebar_current: "docs-tencentcloud-resource-dlc_standard_engine_resource_group_config_info" +description: |- + Provides a resource to create a DLC standard engine resource group config info +--- + +# tencentcloud_dlc_standard_engine_resource_group_config_info + +Provides a resource to create a DLC standard engine resource group config info + +~> **NOTE:** This resource must be exclusive to one engine resource group, do not declare additional config resources of this conf context elsewhere. + +~> **NOTE:** If you use the `tencentcloud_dlc_standard_engine_resource_group_config_info` resource, please do not set `static_config_pairs` or `dynamic_config_pairs` in resource `tencentcloud_dlc_standard_engine_resource_group` simultaneously. + +## Example Usage + +```hcl +resource "tencentcloud_dlc_standard_engine_resource_group_config_info" "example" { + engine_resource_group_name = "tf-example" + static_conf_context { + params { + config_item = "item1" + config_value = "value1" + } + + params { + config_item = "item2" + config_value = "value2" + } + } + + dynamic_conf_context { + params { + config_item = "item3" + config_value = "value3" + } + } +} +``` + +## Argument Reference + +The following arguments are supported: + +* `engine_resource_group_name` - (Required, String, ForceNew) Standard engine resource group name. +* `dynamic_conf_context` - (Optional, List) Dynamic config context. +* `static_conf_context` - (Optional, List) Static config context.
+ +The `dynamic_conf_context` object supports the following: + +* `params` - (Optional, Set) Collection of configuration parameters. + +The `params` object of `dynamic_conf_context` supports the following: + +* `config_item` - (Required, String) Configuration item. +* `config_value` - (Required, String) Configuration value. + +The `params` object of `static_conf_context` supports the following: + +* `config_item` - (Required, String) Configuration item. +* `config_value` - (Required, String) Configuration value. + +The `static_conf_context` object supports the following: + +* `params` - (Optional, Set) Collection of configuration parameters. + +## Attributes Reference + +In addition to all arguments above, the following attributes are exported: + +* `id` - ID of the resource. + + + +## Import + +DLC standard engine resource group config info can be imported using the id, e.g. + +``` +terraform import tencentcloud_dlc_standard_engine_resource_group_config_info.example tf-example +``` + diff --git a/website/tencentcloud.erb b/website/tencentcloud.erb index 0c1829d857..c21b8eb315 100644 --- a/website/tencentcloud.erb +++ b/website/tencentcloud.erb @@ -2330,6 +2330,9 @@