Skip to content

Commit

Permalink
Merge pull request #286 from dbt-labs/add-features-global-connections
Browse files Browse the repository at this point in the history
  • Loading branch information
b-per authored Aug 22, 2024
2 parents 089a22c + f248429 commit eb97618
Show file tree
Hide file tree
Showing 9 changed files with 574 additions and 66 deletions.
11 changes: 9 additions & 2 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,16 @@

All notable changes to this project will be documented in this file.

## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.11...HEAD)
## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.12...HEAD)

# [0.3.11](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.9...v0.3.11)
# [0.3.12](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.11...v0.3.12)

### Changes

- Add support for `import` for `dbtcloud_global_connection`
- Add support for Databricks in `dbtcloud_global_connection`

# [0.3.11](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.3.10...v0.3.11)

### Changes

Expand Down
91 changes: 74 additions & 17 deletions docs/resources/global_connection.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ subcategory: ""
description: |-
This resource can be used to create global connections as introduced in dbt Cloud in August 2024.
Those connections are not linked to a project and can be linked to environments from different projects by using the connection_id field in the dbtcloud_environment resource.
For now, only BigQuery and Snowflake connections are supported and the other Data Warehouses can continue using the existing resources dbtcloud_connection and dbtcloud_fabric_connection ,
For now, only a subset of connections are supported and the other Data Warehouses can continue using the existing resources dbtcloud_connection and dbtcloud_fabric_connection,
but all Data Warehouses will soon be supported under this resource and the other ones will be deprecated in the future.
---

Expand All @@ -15,27 +15,12 @@ This resource can be used to create global connections as introduced in dbt Clou

Those connections are not linked to a project and can be linked to environments from different projects by using the `connection_id` field in the `dbtcloud_environment` resource.

For now, only BigQuery and Snowflake connections are supported and the other Data Warehouses can continue using the existing resources `dbtcloud_connection` and `dbtcloud_fabric_connection` ,
For now, only a subset of connections are supported and the other Data Warehouses can continue using the existing resources `dbtcloud_connection` and `dbtcloud_fabric_connection`,
but all Data Warehouses will soon be supported under this resource and the other ones will be deprecated in the future.

## Example Usage

```terraform
resource "dbtcloud_global_connection" "snowflake" {
name = "My Snowflake connection"
// we can set Privatelink if needed
private_link_endpoint_id = data.dbtcloud_privatelink_endpoint.my_private_link.id
snowflake = {
account = "my-snowflake-account"
database = "MY_DATABASE"
warehouse = "MY_WAREHOUSE"
client_session_keep_alive = false
allow_sso = true
oauth_client_id = "yourclientid"
oauth_client_secret = "yourclientsecret"
}
}
resource "dbtcloud_global_connection" "bigquery" {
name = "My BigQuery connection"
bigquery = {
Expand All @@ -53,6 +38,34 @@ resource "dbtcloud_global_connection" "bigquery" {
application_secret = "oauth_secret_id"
}
}
resource "dbtcloud_global_connection" "databricks" {
name = "My Databricks connection"
databricks = {
host = "my-databricks-host.cloud.databricks.com"
http_path = "/sql/my/http/path"
// optional fields
catalog = "dbt_catalog"
client_id = "yourclientid"
client_secret = "yourclientsecret"
}
}
resource "dbtcloud_global_connection" "snowflake" {
name = "My Snowflake connection"
// we can set Privatelink if needed
private_link_endpoint_id = data.dbtcloud_privatelink_endpoint.my_private_link.id
snowflake = {
account = "my-snowflake-account"
database = "MY_DATABASE"
warehouse = "MY_WAREHOUSE"
client_session_keep_alive = false
allow_sso = true
oauth_client_id = "yourclientid"
oauth_client_secret = "yourclientsecret"
}
}
```

<!-- schema generated by tfplugindocs -->
Expand All @@ -65,6 +78,7 @@ resource "dbtcloud_global_connection" "bigquery" {
### Optional

- `bigquery` (Attributes) (see [below for nested schema](#nestedatt--bigquery))
- `databricks` (Attributes) Databricks connection configuration (see [below for nested schema](#nestedatt--databricks))
- `private_link_endpoint_id` (String) Private Link Endpoint ID. This ID can be found using the `privatelink_endpoint` data source
- `snowflake` (Attributes) Snowflake connection configuration (see [below for nested schema](#nestedatt--snowflake))

Expand Down Expand Up @@ -109,6 +123,21 @@ Optional:
- `timeout_seconds` (Number) Timeout in seconds for queries


<a id="nestedatt--databricks"></a>
### Nested Schema for `databricks`

Required:

- `host` (String) The hostname of the Databricks cluster or SQL warehouse.
- `http_path` (String) The HTTP path of the Databricks cluster or SQL warehouse.

Optional:

- `catalog` (String) Catalog name if Unity Catalog is enabled in your Databricks workspace.
- `client_id` (String) Required to enable Databricks OAuth authentication for IDE developers.
- `client_secret` (String) Required to enable Databricks OAuth authentication for IDE developers.


<a id="nestedatt--snowflake"></a>
### Nested Schema for `snowflake`

Expand All @@ -125,3 +154,31 @@ Optional:
- `oauth_client_id` (String, Sensitive) OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- `oauth_client_secret` (String, Sensitive) OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- `role` (String) The Snowflake role to use when running queries on the connection

## Import

Import is supported using the following syntax:

```shell
# A project-scoped connection can be imported as a global connection by specifying the connection ID
# Migrating from project-scoped connections to global connections could be done by:
# 1. Adding the config for the global connection and importing it (see below)
# 2. Removing the project-scoped connection from the config AND from the state
# - CAREFUL: If the connection is removed from the config but not the state, it will be destroyed on the next apply


# using import blocks (requires Terraform >= 1.5)
import {
to = dbtcloud_global_connection.my_connection
id = "connection_id"
}

import {
to = dbtcloud_global_connection.my_connection
id = "1234"
}

# using the older import command
terraform import dbtcloud_global_connection.my_connection "connection_id"
terraform import dbtcloud_global_connection.my_connection 1234
```
21 changes: 21 additions & 0 deletions examples/resources/dbtcloud_global_connection/import.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# A project-scoped connection can be imported as a global connection by specifying the connection ID
# Migrating from project-scoped connections to global connections could be done by:
# 1. Adding the config for the global connection and importing it (see below)
# 2. Removing the project-scoped connection from the config AND from the state
#    - CAREFUL: If the connection is removed from the config but not the state, it will be destroyed on the next apply


# using import blocks (requires Terraform >= 1.5)
# NOTE: the two blocks below are alternatives for the same resource — use the
# "connection_id" placeholder form OR a literal numeric ID, not both.
import {
  to = dbtcloud_global_connection.my_connection
  id = "connection_id"
}

import {
  to = dbtcloud_global_connection.my_connection
  id = "1234"
}

# using the older import command (again, the two lines are alternatives)
terraform import dbtcloud_global_connection.my_connection "connection_id"
terraform import dbtcloud_global_connection.my_connection 1234
42 changes: 27 additions & 15 deletions examples/resources/dbtcloud_global_connection/resource.tf
Original file line number Diff line number Diff line change
@@ -1,18 +1,3 @@
resource "dbtcloud_global_connection" "snowflake" {
name = "My Snowflake connection"
// we can set Privatelink if needed
private_link_endpoint_id = data.dbtcloud_privatelink_endpoint.my_private_link.id
snowflake = {
account = "my-snowflake-account"
database = "MY_DATABASE"
warehouse = "MY_WAREHOUSE"
client_session_keep_alive = false
allow_sso = true
oauth_client_id = "yourclientid"
oauth_client_secret = "yourclientsecret"
}
}

resource "dbtcloud_global_connection" "bigquery" {
name = "My BigQuery connection"
bigquery = {
Expand All @@ -29,4 +14,31 @@ resource "dbtcloud_global_connection" "bigquery" {
application_id = "oauth_application_id"
application_secret = "oauth_secret_id"
}
}

# Example: Databricks global connection.
resource "dbtcloud_global_connection" "databricks" {
  name = "My Databricks connection"
  databricks = {
    # host and http_path point at the Databricks cluster or SQL warehouse
    host      = "my-databricks-host.cloud.databricks.com"
    http_path = "/sql/my/http/path"
    // optional fields
    catalog       = "dbt_catalog" # only relevant when Unity Catalog is enabled
    client_id     = "yourclientid"
    client_secret = "yourclientsecret"
  }
}

# Example: Snowflake global connection.
resource "dbtcloud_global_connection" "snowflake" {
  name = "My Snowflake connection"
  // we can set Privatelink if needed
  private_link_endpoint_id = data.dbtcloud_privatelink_endpoint.my_private_link.id
  snowflake = {
    account   = "my-snowflake-account"
    database  = "MY_DATABASE"
    warehouse = "MY_WAREHOUSE"
    client_session_keep_alive = false
    # allow_sso together with the oauth_client_* fields enables OAuth
    # between dbt Cloud and Snowflake
    allow_sso           = true
    oauth_client_id     = "yourclientid"
    oauth_client_secret = "yourclientsecret"
  }
}
51 changes: 51 additions & 0 deletions pkg/dbt_cloud/global_connection.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,45 @@ type GlobalConnectionConfig interface {
AdapterVersion() string
}

// TODO: Could be improved in the future, maybe creating a client with empty Config
// For now, I couldn't use it as the AdapterVersion is not returned in the GET response
// To be revisited when we handle different versions for the same adapter
//
// GlobalConnectionAdapter holds the minimal subset of a connection response
// needed to identify a connection: its numeric ID and its adapter_version
// string (e.g. "databricks_v0").
type GlobalConnectionAdapter struct {
	Data struct {
		ID             int64  `json:"id"`
		AdapterVersion string `json:"adapter_version"`
	} `json:"data"`
}

// GetGlobalConnectionAdapter fetches a global connection by ID and decodes
// only its ID and adapter_version fields. It lets callers discover which
// adapter a connection uses (e.g. on import) without knowing its full config.
//
// It returns an error if the request cannot be built, the API call fails,
// or the response body is not valid JSON.
func (c *Client) GetGlobalConnectionAdapter(connectionID int64) (*GlobalConnectionAdapter, error) {
	// Use the typed method constant rather than the raw "GET" string.
	req, err := http.NewRequest(
		http.MethodGet,
		fmt.Sprintf(
			"%s/v3/accounts/%d/connections/%d/",
			c.HostURL,
			c.AccountID,
			connectionID,
		),
		nil,
	)
	if err != nil {
		return nil, err
	}

	body, err := c.doRequest(req)
	if err != nil {
		return nil, err
	}

	connectionResponse := GlobalConnectionAdapter{}
	if err := json.Unmarshal(body, &connectionResponse); err != nil {
		return nil, err
	}

	return &connectionResponse, nil
}

type GlobalConnectionCommon struct {
ID *int64 `json:"id,omitempty"`
Name *string `json:"name,omitempty"`
Expand Down Expand Up @@ -240,3 +279,15 @@ type BigQueryConfig struct {
func (BigQueryConfig) AdapterVersion() string {
return "bigquery_v0"
}

// DatabricksConfig is the API payload for a Databricks global connection.
// Catalog, ClientID and ClientSecret use nullable.Nullable rather than plain
// pointers — presumably so that an explicit JSON null can be sent to unset a
// value, as opposed to omitting the field — TODO confirm against the API.
type DatabricksConfig struct {
	Host         *string                   `json:"host,omitempty"`
	HTTPPath     *string                   `json:"http_path,omitempty"`
	Catalog      nullable.Nullable[string] `json:"catalog,omitempty"`
	ClientID     nullable.Nullable[string] `json:"client_id,omitempty"`
	ClientSecret nullable.Nullable[string] `json:"client_secret,omitempty"`
}

// AdapterVersion returns the dbt Cloud adapter version identifier used when
// creating Databricks connections.
func (DatabricksConfig) AdapterVersion() string {
	return "databricks_v0"
}
34 changes: 24 additions & 10 deletions pkg/framework/objects/global_connection/model.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,26 @@ package global_connection

import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/samber/lo"
)

var supportedGlobalConfigTypes = []string{"bigquery", "snowflake"}
// mappingAdapterEmptyConfig maps each supported config block name to an empty
// config struct of the matching model type; the supported adapter list is
// derived from its keys so the two cannot drift apart.
var mappingAdapterEmptyConfig = map[string]any{
	"bigquery":   BigQueryConfig{},
	"snowflake":  SnowflakeConfig{},
	"databricks": DatabricksConfig{},
}

// supportedGlobalConfigTypes is the set of adapter keys accepted by the
// resource.
// NOTE(review): lo.Keys iterates a map, so the slice order is
// nondeterministic between runs; sort it before using it in any
// user-facing message — TODO confirm how callers use it.
var supportedGlobalConfigTypes = lo.Keys(mappingAdapterEmptyConfig)

type GlobalConnectionResourceModel struct {
ID types.Int64 `tfsdk:"id"`
AdapterVersion types.String `tfsdk:"adapter_version"`
Name types.String `tfsdk:"name"`
IsSshTunnelEnabled types.Bool `tfsdk:"is_ssh_tunnel_enabled"`
PrivateLinkEndpointId types.String `tfsdk:"private_link_endpoint_id"`
OauthConfigurationId types.Int64 `tfsdk:"oauth_configuration_id"`
SnowflakeConfig *SnowflakeConfig `tfsdk:"snowflake"`
BigQueryConfig *BigQueryConfig `tfsdk:"bigquery"`
ID types.Int64 `tfsdk:"id"`
AdapterVersion types.String `tfsdk:"adapter_version"`
Name types.String `tfsdk:"name"`
IsSshTunnelEnabled types.Bool `tfsdk:"is_ssh_tunnel_enabled"`
PrivateLinkEndpointId types.String `tfsdk:"private_link_endpoint_id"`
OauthConfigurationId types.Int64 `tfsdk:"oauth_configuration_id"`
SnowflakeConfig *SnowflakeConfig `tfsdk:"snowflake"`
BigQueryConfig *BigQueryConfig `tfsdk:"bigquery"`
DatabricksConfig *DatabricksConfig `tfsdk:"databricks"`
}

type BigQueryConfig struct {
Expand Down Expand Up @@ -57,7 +64,14 @@ type SnowflakeConfig struct {
Role types.String `tfsdk:"role"`
}

type DatabricksConfig struct{}
// DatabricksConfig is the Terraform plugin-framework model for the
// `databricks` block of dbtcloud_global_connection.
type DatabricksConfig struct {
	Host     types.String `tfsdk:"host"`
	HTTPPath types.String `tfsdk:"http_path"`
	// nullable — these fields are optional and map to nullable values in the
	// API payload
	Catalog      types.String `tfsdk:"catalog"`
	ClientID     types.String `tfsdk:"client_id"`
	ClientSecret types.String `tfsdk:"client_secret"`
}

type RedshiftConfig struct{}

Expand Down
Loading

0 comments on commit eb97618

Please sign in to comment.