diff --git a/VERSION b/VERSION
index 1df5b7ec..9a52cbd4 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-0.0.46
+0.0.69
diff --git a/docs/data-sources/dbt_cloud_environment.md b/docs/data-sources/dbt_cloud_environment.md
new file mode 100644
index 00000000..ab4cc43a
--- /dev/null
+++ b/docs/data-sources/dbt_cloud_environment.md
@@ -0,0 +1,37 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "dbt_cloud_environment Data Source - terraform-provider-dbt-cloud"
+subcategory: ""
+description: |-
+
+---
+
+# dbt_cloud_environment (Data Source)
+
+
+
+
+
+## Schema
+
+### Required
+
+- **environment_id** (Number) Environment ID
+- **project_id** (Number) Project ID
+
+### Optional
+
+- **id** (String) The ID of this resource.
+
+### Read-Only
+
+- **credential_id** (Number) Credential ID to create the environment with
+- **custom_branch** (String) Which custom branch to use in this environment
+- **dbt_version** (String) Version number of dbt to use in this environment
+- **is_active** (Boolean) Whether the environment is active
+- **name** (String) Environment name
+- **type** (String) The type of environment (must be either development or deployment)
+- **use_custom_branch** (Boolean) Whether to use a custom git branch in this environment
+
+
diff --git a/docs/data-sources/dbt_cloud_snowflake_credential.md b/docs/data-sources/dbt_cloud_snowflake_credential.md
new file mode 100644
index 00000000..ce672029
--- /dev/null
+++ b/docs/data-sources/dbt_cloud_snowflake_credential.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "dbt_cloud_snowflake_credential Data Source - terraform-provider-dbt-cloud"
+subcategory: ""
+description: |-
+
+---
+
+# dbt_cloud_snowflake_credential (Data Source)
+
+
+
+
+
+## Schema
+
+### Required
+
+- **credential_id** (Number) Credential ID
+- **project_id** (Number) Project ID
+
+### Optional
+
+- **id** (String) The ID of this resource.
+
+### Read-Only
+
+- **auth_type** (String) The type of Snowflake credential ('password' only currently supported in Terraform)
+- **is_active** (Boolean) Whether the Snowflake credential is active
+- **num_threads** (Number) Number of threads to use
+- **password** (String, Sensitive) Password for Snowflake
+- **schema** (String) Default schema name
+- **user** (String) Username for Snowflake
+
+
diff --git a/docs/resources/dbt_cloud_environment.md b/docs/resources/dbt_cloud_environment.md
new file mode 100644
index 00000000..bd8965a3
--- /dev/null
+++ b/docs/resources/dbt_cloud_environment.md
@@ -0,0 +1,33 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "dbt_cloud_environment Resource - terraform-provider-dbt-cloud"
+subcategory: ""
+description: |-
+
+---
+
+# dbt_cloud_environment (Resource)
+
+
+
+
+
+## Schema
+
+### Required
+
+- **dbt_version** (String) Version number of dbt to use in this environment
+- **name** (String) Environment name
+- **project_id** (Number) Project ID to create the environment in
+- **type** (String) The type of environment (must be either development or deployment)
+
+### Optional
+
+- **credential_id** (Number) Credential ID to create the environment with
+- **custom_branch** (String) Which custom branch to use in this environment
+- **id** (String) The ID of this resource.
+- **is_active** (Boolean) Whether the environment is active
+- **use_custom_branch** (Boolean) Whether to use a custom git branch in this environment
+
+
diff --git a/docs/resources/dbt_cloud_snowflake_credential.md b/docs/resources/dbt_cloud_snowflake_credential.md
new file mode 100644
index 00000000..09bdbf15
--- /dev/null
+++ b/docs/resources/dbt_cloud_snowflake_credential.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "dbt_cloud_snowflake_credential Resource - terraform-provider-dbt-cloud"
+subcategory: ""
+description: |-
+
+---
+
+# dbt_cloud_snowflake_credential (Resource)
+
+
+
+
+
+## Schema
+
+### Required
+
+- **auth_type** (String) The type of Snowflake credential ('password' only currently supported in Terraform)
+- **num_threads** (Number) Number of threads to use
+- **password** (String, Sensitive) Password for Snowflake
+- **project_id** (Number) Project ID to create the Snowflake credential in
+- **schema** (String) Default schema name
+- **user** (String) Username for Snowflake
+
+### Optional
+
+- **id** (String) The ID of this resource.
+- **is_active** (Boolean) Whether the Snowflake credential is active
+
+### Read-Only
+
+- **credential_id** (Number) The system Snowflake credential ID
+
+
diff --git a/pkg/data_sources/environment.go b/pkg/data_sources/environment.go
new file mode 100644
index 00000000..890eee3a
--- /dev/null
+++ b/pkg/data_sources/environment.go
@@ -0,0 +1,119 @@
+package data_sources
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/gthesheep/terraform-provider-dbt-cloud/pkg/dbt_cloud"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+var environmentSchema = map[string]*schema.Schema{
+    "environment_id": &schema.Schema{
+        Type:        schema.TypeInt,
+        Required:    true,
+        Description: "Environment ID",
+    },
+    "project_id": &schema.Schema{
+        Type:        schema.TypeInt,
+        Required:    true,
+        Description: "Project ID",
+    },
+    "is_active": &schema.Schema{
+        Type:        schema.TypeBool,
+        Computed:    true,
+        Description: "Whether the environment is active",
+    },
+    "credential_id": &schema.Schema{
+        Type:        schema.TypeInt,
+        Computed:    true,
+        Description: "Credential ID to create the environment with",
+    },
+    "name": &schema.Schema{
+        Type:        schema.TypeString,
+        Computed:    true,
+        Description: "Environment name",
+    },
+    "dbt_version": &schema.Schema{
+        Type:        schema.TypeString,
+        Computed:    true,
+        Description: "Version number of dbt to use in this environment",
+    },
+    "type": &schema.Schema{
+        Type:        schema.TypeString,
+        Computed:    true,
+        Description: "The type of environment (must be either development or deployment)",
+        ValidateFunc: func(val interface{}, key string) (warns []string, errs []error) {
+            type_ := val.(string)
+            switch type_ {
+            case
+                "development",
+                "deployment":
+                return
+            }
+            errs = append(errs, fmt.Errorf("%q must be either development or deployment, got: %q", key, type_))
+            return
+        },
+    },
+    "use_custom_branch": &schema.Schema{
+        Type:        schema.TypeBool,
+        Computed:    true,
+        Description: "Whether to use a custom git branch in this environment",
+    },
+    "custom_branch": &schema.Schema{
+        Type:        schema.TypeString,
+        Computed:    true,
+        Description: "Which custom branch to use in this environment",
+    },
+}
+
+func DatasourceEnvironment() *schema.Resource {
+    return &schema.Resource{
+        ReadContext: datasourceEnvironmentRead,
+        Schema:      environmentSchema,
+    }
+}
+
+func datasourceEnvironmentRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    var diags diag.Diagnostics
+
+    environmentID := d.Get("environment_id").(int)
+    projectID := d.Get("project_id").(int)
+
+    environment, err := c.GetEnvironment(projectID, environmentID)
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    if err := d.Set("is_active", environment.State == dbt_cloud.STATE_ACTIVE); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("project_id", environment.Project_Id); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("credential_id", environment.Credential_Id); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("name", environment.Name); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("dbt_version", environment.Dbt_Version); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("type", environment.Type); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("use_custom_branch", environment.Use_Custom_Branch); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("custom_branch", environment.Custom_Branch); err != nil {
+        return diag.FromErr(err)
+    }
+
+    d.SetId(fmt.Sprintf("%d%s%d", environment.Project_Id, dbt_cloud.ID_DELIMITER, *environment.ID))
+
+    return diags
+}
diff --git a/pkg/data_sources/environment_test.go b/pkg/data_sources/environment_test.go
new file mode 100644
index 00000000..deb78914
--- /dev/null
+++ b/pkg/data_sources/environment_test.go
@@ -0,0 +1,45 @@
+package data_sources_test
+
+import (
+    "fmt"
+    "strconv"
+    "testing"
+
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+)
+
+func TestAccDbtCloudEnvironmentDataSource(t *testing.T) {
+
+    randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum)
+    randomIDInt, _ := strconv.Atoi(randomID)
+
+    config := fmt.Sprintf(`
+    data "dbt_cloud_environment" "test" {
+        project_id = 123
+        environment_id = %d
+    }
+    `, randomIDInt)
+
+    check := resource.ComposeAggregateTestCheckFunc(
+        resource.TestCheckResourceAttr("data.dbt_cloud_environment.test", "environment_id", randomID),
+        resource.TestCheckResourceAttr("data.dbt_cloud_environment.test", "project_id", "123"),
+        resource.TestCheckResourceAttrSet("data.dbt_cloud_environment.test", "name"),
+        resource.TestCheckResourceAttrSet("data.dbt_cloud_environment.test", "is_active"),
+        resource.TestCheckResourceAttrSet("data.dbt_cloud_environment.test", "credential_id"),
+        resource.TestCheckResourceAttrSet("data.dbt_cloud_environment.test", "dbt_version"),
+        resource.TestCheckResourceAttrSet("data.dbt_cloud_environment.test", "type"),
+        resource.TestCheckResourceAttrSet("data.dbt_cloud_environment.test", "use_custom_branch"),
+        resource.TestCheckResourceAttrSet("data.dbt_cloud_environment.test", "custom_branch"),
+    )
+
+    resource.ParallelTest(t, resource.TestCase{
+        Providers: providers(),
+        Steps: []resource.TestStep{
+            {
+                Config: config,
+                Check:  check,
+            },
+        },
+    })
+}
diff --git a/pkg/data_sources/snowflake_credential.go b/pkg/data_sources/snowflake_credential.go
new file mode 100644
index 00000000..9596160e
--- /dev/null
+++ b/pkg/data_sources/snowflake_credential.go
@@ -0,0 +1,112 @@
+package data_sources
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/gthesheep/terraform-provider-dbt-cloud/pkg/dbt_cloud"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+var snowflakeCredentialSchema = 
map[string]*schema.Schema{ + "project_id": &schema.Schema{ + Type: schema.TypeInt, + Required: true, + Description: "Project ID", + }, + "credential_id": &schema.Schema{ + Type: schema.TypeInt, + Required: true, + Description: "Credential ID", + }, + "is_active": &schema.Schema{ + Type: schema.TypeBool, + Computed: true, + Description: "Whether the Snowflake credential is active", + }, + "auth_type": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The type of Snowflake credential ('password' only currently supported in Terraform)", + ValidateFunc: func(val interface{}, key string) (warns []string, errs []error) { + type_ := val.(string) + switch type_ { + case + "password": + return + } + errs = append(errs, fmt.Errorf("%q must be password, got: %q", key, type_)) + return + }, + }, + "schema": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "Default schema name", + }, + "user": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "Username for Snowflake", + }, + "password": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Sensitive: true, + Description: "Password for Snowflake", + }, + "num_threads": &schema.Schema{ + Type: schema.TypeInt, + Computed: true, + Description: "Number of threads to use", + }, + // TODO: add private_key and private_key_passphrase +} + +func DatasourceSnowflakeCredential() *schema.Resource { + return &schema.Resource{ + ReadContext: snowflakeCredentialRead, + Schema: snowflakeCredentialSchema, + } +} + +func snowflakeCredentialRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { + c := m.(*dbt_cloud.Client) + + var diags diag.Diagnostics + + credentialID := d.Get("credential_id").(int) + projectID := d.Get("project_id").(int) + + snowflakeCredential, err := c.GetSnowflakeCredential(projectID, credentialID) + if err != nil { + return diag.FromErr(err) + } + + if err := d.Set("is_active", snowflakeCredential.State == dbt_cloud.STATE_ACTIVE); err != nil { + return diag.FromErr(err) + } + if err := d.Set("project_id", snowflakeCredential.Project_Id); err != nil { + return diag.FromErr(err) + } + if err := d.Set("auth_type", snowflakeCredential.Auth_Type); err != nil { + return diag.FromErr(err) + } + if err := d.Set("schema", snowflakeCredential.Schema); err != nil { + return diag.FromErr(err) + } + if err := d.Set("user", snowflakeCredential.User); err != nil { + return diag.FromErr(err) + } + if err := d.Set("password", snowflakeCredential.Password); err != nil { + return diag.FromErr(err) + } + if err := d.Set("num_threads", snowflakeCredential.Threads); err != nil { + return diag.FromErr(err) + } + + d.SetId(fmt.Sprintf("%d%s%d", snowflakeCredential.Project_Id, dbt_cloud.ID_DELIMITER, *snowflakeCredential.ID)) + + return diags +} diff --git a/pkg/data_sources/snowflake_credential_test.go b/pkg/data_sources/snowflake_credential_test.go new file mode 100644 index 00000000..c8e587f8 --- /dev/null +++ b/pkg/data_sources/snowflake_credential_test.go @@ -0,0 +1,44 @@ +package data_sources_test + +import ( + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDbtCloudSnowflakeCredentialDataSource(t *testing.T) { + + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + randomIDInt, _ := strconv.Atoi(randomID) + + config := fmt.Sprintf(` + data "dbt_cloud_snowflake_credential" "test" { + project_id = 123 + 
credential_id = %d + } + `, randomIDInt) + + check := resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.dbt_cloud_snowflake_credential.test", "credential_id", randomID), + resource.TestCheckResourceAttr("data.dbt_cloud_snowflake_credential.test", "project_id", "123"), + resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "auth_type"), + resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "is_active"), + resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "schema"), + resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "user"), + resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "password"), + resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "num_threads"), + ) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: config, + Check: check, + }, + }, + }) +} diff --git a/pkg/dbt_cloud/common.go b/pkg/dbt_cloud/common.go new file mode 100644 index 00000000..54fb88b2 --- /dev/null +++ b/pkg/dbt_cloud/common.go @@ -0,0 +1,7 @@ +package dbt_cloud + +const ( + STATE_ACTIVE = 1 + STATE_DELETED = 2 + ID_DELIMITER = ":" +) diff --git a/pkg/dbt_cloud/environment.go b/pkg/dbt_cloud/environment.go new file mode 100644 index 00000000..428ae527 --- /dev/null +++ b/pkg/dbt_cloud/environment.go @@ -0,0 +1,123 @@ +package dbt_cloud + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" +) + +type EnvironmentListResponse struct { + Data []Environment `json:"data"` + Status ResponseStatus `json:"status"` +} + +type EnvironmentResponse struct { + Data Environment `json:"data"` + Status ResponseStatus `json:"status"` +} + +type Environment struct { + ID *int `json:"id"` + State int `json:"state"` + Account_Id int `json:"account_id"` + Project_Id int `json:"project_id"` + Credential_Id *int `json:"credentials_id"` + Name string `json:"name"` + Dbt_Version string `json:"dbt_version"` + Type string `json:"type"` + Use_Custom_Branch bool `json:"use_custom_branch"` + Custom_Branch string `json:"custom_branch"` +} + +func (c *Client) GetEnvironment(projectId int, environmentId int) (*Environment, error) { + req, err := http.NewRequest("GET", fmt.Sprintf("%s/v3/accounts/%d/projects/%d/environments/", HostURL, c.AccountID, projectId), nil) + if err != nil { + return nil, err + } + + body, err := c.doRequest(req) + if err != nil { + return nil, err + } + + environmentListResponse := EnvironmentListResponse{} + err = json.Unmarshal(body, &environmentListResponse) + if err != nil { + return nil, err + } + + for i, environment := range environmentListResponse.Data { + if *environment.ID == environmentId { + return &environmentListResponse.Data[i], nil + } + } + + return nil, fmt.Errorf("did not find environment ID %d in project ID %d", environmentId, projectId) +} + +func (c *Client) CreateEnvironment(isActive bool, projectId int, name string, dbtVersion string, type_ string, useCustomBranch bool, customBranch string, credentialId int) (*Environment, error) { + state := 1 + if !isActive { + state = 2 + } + + newEnvironment := Environment{ + State: state, + Account_Id: c.AccountID, + Project_Id: projectId, + Name: name, + Dbt_Version: dbtVersion, + Type: type_, + Credential_Id: &credentialId, + Use_Custom_Branch: useCustomBranch, + Custom_Branch: customBranch, + } + newEnvironmentData, err := json.Marshal(newEnvironment) + if err != nil { + return nil, err + } + + req, 
err := http.NewRequest("POST", fmt.Sprintf("%s/v3/accounts/%d/projects/%d/environments/", HostURL, c.AccountID, projectId), strings.NewReader(string(newEnvironmentData))) + if err != nil { + return nil, err + } + + body, err := c.doRequest(req) + if err != nil { + return nil, err + } + + environmentResponse := EnvironmentResponse{} + err = json.Unmarshal(body, &environmentResponse) + if err != nil { + return nil, err + } + + return &environmentResponse.Data, nil +} + +func (c *Client) UpdateEnvironment(projectId int, environmentId int, environment Environment) (*Environment, error) { + environmentData, err := json.Marshal(environment) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", fmt.Sprintf("%s/v3/accounts/%d/projects/%d/environments/%d", HostURL, c.AccountID, projectId, environmentId), strings.NewReader(string(environmentData))) + if err != nil { + return nil, err + } + + body, err := c.doRequest(req) + if err != nil { + return nil, err + } + + environmentResponse := EnvironmentResponse{} + err = json.Unmarshal(body, &environmentResponse) + if err != nil { + return nil, err + } + + return &environmentResponse.Data, nil +} diff --git a/pkg/dbt_cloud/project.go b/pkg/dbt_cloud/project.go index 12f77b89..475ec78e 100644 --- a/pkg/dbt_cloud/project.go +++ b/pkg/dbt_cloud/project.go @@ -45,15 +45,15 @@ func (c *Client) GetProject(projectID string) (*Project, error) { if err != nil { return nil, err } - + return &projectResponse.Data, nil } func (c *Client) CreateProject(name string, dbtProjectSubdirectory string, connectionID int, repositoryID int) (*Project, error) { newProject := Project{ - Name: name, - State: 1, - AccountID: c.AccountID, + Name: name, + State: 1, + AccountID: c.AccountID, } if dbtProjectSubdirectory != "" { newProject.DbtProjectSubdirectory = &dbtProjectSubdirectory @@ -64,7 +64,7 @@ func (c *Client) CreateProject(name string, dbtProjectSubdirectory string, conne if repositoryID != 0 { newProject.RepositoryID = &repositoryID } - + newProjectData, err := json.Marshal(newProject) if err != nil { return nil, err diff --git a/pkg/dbt_cloud/snowflake_credential.go b/pkg/dbt_cloud/snowflake_credential.go new file mode 100644 index 00000000..72cdc993 --- /dev/null +++ b/pkg/dbt_cloud/snowflake_credential.go @@ -0,0 +1,118 @@ +package dbt_cloud + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" +) + +type SnowflakeCredentialListResponse struct { + Data []SnowflakeCredential `json:"data"` + Status ResponseStatus `json:"status"` +} + +type SnowflakeCredentialResponse struct { + Data SnowflakeCredential `json:"data"` + Status ResponseStatus `json:"status"` +} + +type SnowflakeCredential struct { + ID *int `json:"id"` + Account_Id int `json:"account_id"` + Project_Id int `json:"project_id"` + Type string `json:"type"` + State int `json:"state"` + Threads int `json:"threads"` + User string `json:"user"` + Password *string `json:"password"` + Auth_Type string `json:"auth_type"` + Schema string `json:"schema"` +} + +func (c *Client) GetSnowflakeCredential(projectId int, credentialId int) (*SnowflakeCredential, error) { + req, err := http.NewRequest("GET", fmt.Sprintf("%s/v3/accounts/%d/projects/%d/credentials/", HostURL, c.AccountID, projectId), nil) + if err != nil { + return nil, err + } + + body, err := c.doRequest(req) + if err != nil { + return nil, err + } + + snowflakeCredentialListResponse := SnowflakeCredentialListResponse{} + err = json.Unmarshal(body, &snowflakeCredentialListResponse) + if err != nil { + return nil, err + } + + 
    for i, credential := range snowflakeCredentialListResponse.Data {
+        if *credential.ID == credentialId {
+            return &snowflakeCredentialListResponse.Data[i], nil
+        }
+    }
+
+    return nil, fmt.Errorf("did not find credential ID %d in project ID %d", credentialId, projectId)
+}
+
+func (c *Client) CreateSnowflakeCredential(projectId int, type_ string, isActive bool, schema string, user string, password string, authType string, numThreads int) (*SnowflakeCredential, error) {
+    newSnowflakeCredential := SnowflakeCredential{
+        Account_Id: c.AccountID,
+        Project_Id: projectId,
+        Type:       type_,
+        State:      1, // TODO: make variable
+        Schema:     schema,
+        User:       user,
+        Password:   &password,
+        Auth_Type:  authType,
+        Threads:    numThreads,
+    }
+    newSnowflakeCredentialData, err := json.Marshal(newSnowflakeCredential)
+    if err != nil {
+        return nil, err
+    }
+
+    req, err := http.NewRequest("POST", fmt.Sprintf("%s/v3/accounts/%d/projects/%d/credentials/", HostURL, c.AccountID, projectId), strings.NewReader(string(newSnowflakeCredentialData)))
+    if err != nil {
+        return nil, err
+    }
+
+    body, err := c.doRequest(req)
+    if err != nil {
+        return nil, err
+    }
+
+    snowflakeCredentialResponse := SnowflakeCredentialResponse{}
+    err = json.Unmarshal(body, &snowflakeCredentialResponse)
+    if err != nil {
+        return nil, err
+    }
+
+    return &snowflakeCredentialResponse.Data, nil
+}
+
+func (c *Client) UpdateSnowflakeCredential(projectId int, credentialId int, snowflakeCredential SnowflakeCredential) (*SnowflakeCredential, error) {
+    snowflakeCredentialData, err := json.Marshal(snowflakeCredential)
+    if err != nil {
+        return nil, err
+    }
+
+    req, err := http.NewRequest("POST", fmt.Sprintf("%s/v3/accounts/%d/projects/%d/credentials/%d", HostURL, c.AccountID, projectId, credentialId), strings.NewReader(string(snowflakeCredentialData)))
+    if err != nil {
+        return nil, err
+    }
+
+    body, err := c.doRequest(req)
+    if err != nil {
+        return nil, err
+    }
+
+    snowflakeCredentialResponse := SnowflakeCredentialResponse{}
+    err = json.Unmarshal(body, &snowflakeCredentialResponse)
+    if err != nil {
+        return nil, err
+    }
+
+    return &snowflakeCredentialResponse.Data, nil
+}
diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go
index 68bab898..d4df7abe 100644
--- a/pkg/provider/provider.go
+++ b/pkg/provider/provider.go
@@ -28,12 +28,16 @@ func Provider() *schema.Provider {
             },
         },
         DataSourcesMap: map[string]*schema.Resource{
-            "dbt_cloud_job":     data_sources.DatasourceJob(),
-            "dbt_cloud_project": data_sources.DatasourceProject(),
+            "dbt_cloud_job":                  data_sources.DatasourceJob(),
+            "dbt_cloud_project":              data_sources.DatasourceProject(),
+            "dbt_cloud_environment":          data_sources.DatasourceEnvironment(),
+            "dbt_cloud_snowflake_credential": data_sources.DatasourceSnowflakeCredential(),
         },
         ResourcesMap: map[string]*schema.Resource{
-            "dbt_cloud_job":     resources.ResourceJob(),
-            "dbt_cloud_project": resources.ResourceProject(),
+            "dbt_cloud_job":                  resources.ResourceJob(),
+            "dbt_cloud_project":              resources.ResourceProject(),
+            "dbt_cloud_environment":          resources.ResourceEnvironment(),
+            "dbt_cloud_snowflake_credential": resources.ResourceSnowflakeCredential(),
         },
         ConfigureContextFunc: providerConfigure,
     }
diff --git a/pkg/resources/environment.go b/pkg/resources/environment.go
new file mode 100644
index 00000000..cb90959b
--- /dev/null
+++ b/pkg/resources/environment.go
@@ -0,0 +1,223 @@
+package resources
+
+import (
+    "context"
+    "fmt"
+    "log"
+    "strconv"
+    "strings"
+
+    "github.com/gthesheep/terraform-provider-dbt-cloud/pkg/dbt_cloud"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+const ENVIRONMENT_STATE_ACTIVE = 1
+const ENVIRONMENT_STATE_DELETED = 2
+
+func ResourceEnvironment() *schema.Resource {
+    return &schema.Resource{
+        CreateContext: resourceEnvironmentCreate,
+        ReadContext:   resourceEnvironmentRead,
+        UpdateContext: resourceEnvironmentUpdate,
+        DeleteContext: resourceEnvironmentDelete,
+
+        Schema: map[string]*schema.Schema{
+            "is_active": &schema.Schema{
+                Type:        schema.TypeBool,
+                Optional:    true,
+                Default:     true,
+                Description: "Whether the environment is active",
+            },
+            "project_id": &schema.Schema{
+                Type:        schema.TypeInt,
+                Required:    true,
+                Description: "Project ID to create the environment in",
+            },
+            "credential_id": &schema.Schema{
+                Type:        schema.TypeInt,
+                Optional:    true,
+                Default:     nil,
+                Description: "Credential ID to create the environment with",
+            },
+            "name": &schema.Schema{
+                Type:        schema.TypeString,
+                Required:    true,
+                Description: "Environment name",
+            },
+            "dbt_version": &schema.Schema{
+                Type:        schema.TypeString,
+                Required:    true,
+                Description: "Version number of dbt to use in this environment",
+            },
+            "type": &schema.Schema{
+                Type:        schema.TypeString,
+                Required:    true,
+                Description: "The type of environment (must be either development or deployment)",
+                ValidateFunc: func(val interface{}, key string) (warns []string, errs []error) {
+                    type_ := val.(string)
+                    switch type_ {
+                    case
+                        "development",
+                        "deployment":
+                        return
+                    }
+                    errs = append(errs, fmt.Errorf("%q must be either development or deployment, got: %q", key, type_))
+                    return
+                },
+            },
+            "use_custom_branch": &schema.Schema{
+                Type:        schema.TypeBool,
+                Optional:    true,
+                Default:     false,
+                Description: "Whether to use a custom git branch in this environment",
+            },
+            "custom_branch": &schema.Schema{
+                Type:        schema.TypeString,
+                Optional:    true,
+                Default:     "",
+                Description: "Which custom branch to use in this environment",
+            },
+        },
+
+        Importer: &schema.ResourceImporter{
+            StateContext: schema.ImportStatePassthroughContext,
+        },
+    }
+}
+
+func resourceEnvironmentCreate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    // Warning or errors can be collected in a slice type
+    var diags diag.Diagnostics
+
+    isActive := d.Get("is_active").(bool)
+    projectId := d.Get("project_id").(int)
+    credentialId := d.Get("credential_id").(int)
+    name := d.Get("name").(string)
+    dbtVersion := d.Get("dbt_version").(string)
+    type_ := d.Get("type").(string)
+    useCustomBranch := d.Get("use_custom_branch").(bool)
+    customBranch := d.Get("custom_branch").(string)
+
+    environment, err := c.CreateEnvironment(isActive, projectId, name, dbtVersion, type_, useCustomBranch, customBranch, credentialId)
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    d.SetId(fmt.Sprintf("%d%s%d", environment.Project_Id, dbt_cloud.ID_DELIMITER, *environment.ID))
+
+    resourceEnvironmentRead(ctx, d, m)
+
+    return diags
+}
+
+func resourceEnvironmentRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    // Warning or errors can be collected in a slice type
+    var diags diag.Diagnostics
+
+    projectId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[0])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    environmentId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[1])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    environment, err := c.GetEnvironment(projectId, environmentId)
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    if err := d.Set("is_active", environment.State == ENVIRONMENT_STATE_ACTIVE); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("project_id", environment.Project_Id); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("name", environment.Name); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("dbt_version", environment.Dbt_Version); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("type", environment.Type); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("use_custom_branch", environment.Use_Custom_Branch); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("custom_branch", environment.Custom_Branch); err != nil {
+        return diag.FromErr(err)
+    }
+
+    return diags
+}
+
+func resourceEnvironmentUpdate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    projectId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[0])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    environmentId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[1])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    // TODO: add more changes here
+
+    if d.HasChange("name") {
+        environment, err := c.GetEnvironment(projectId, environmentId)
+        if err != nil {
+            return diag.FromErr(err)
+        }
+
+        name := d.Get("name").(string)
+        environment.Name = name
+        _, err = c.UpdateEnvironment(projectId, environmentId, *environment)
+        if err != nil {
+            return diag.FromErr(err)
+        }
+    }
+
+    return resourceEnvironmentRead(ctx, d, m)
+}
+
+func resourceEnvironmentDelete(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    projectId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[0])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    environmentId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[1])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    log.Printf("Deleting environments is not yet supported in dbt Cloud, setting state to deleted")
+
+    var diags diag.Diagnostics
+
+    environment, err := c.GetEnvironment(projectId, environmentId)
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    environment.State = ENVIRONMENT_STATE_DELETED
+    _, err = c.UpdateEnvironment(projectId, environmentId, *environment)
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    return diags
+}
diff --git a/pkg/resources/environment_test.go b/pkg/resources/environment_test.go
new file mode 100644
index 00000000..d6d8ebd9
--- /dev/null
+++ b/pkg/resources/environment_test.go
@@ -0,0 +1,47 @@
+package resources_test
+
+import (
+    "fmt"
+    "testing"
+
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+)
+
+func TestAccDbtCloudEnvironmentResource(t *testing.T) {
+
+    randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum)
+
+    config := fmt.Sprintf(`
+    resource "dbt_cloud_environment" "test" {
+        is_active = true
+        name = "dbt-cloud-environment-%s"
+        project_id = 123
+        dbt_version = "0.21.0"
+        type = "deployment"
+        use_custom_branch = true
+        custom_branch = "dev"
+    }
+    `, randomID)
+
+    check := resource.ComposeAggregateTestCheckFunc(
+        resource.TestCheckResourceAttrSet("dbt_cloud_environment.test", "environment_id"),
+        resource.TestCheckResourceAttr("dbt_cloud_environment.test", "is_active", "true"),
+        resource.TestCheckResourceAttr("dbt_cloud_environment.test", "name", fmt.Sprintf("dbt-cloud-environment-%s", randomID)),
+        resource.TestCheckResourceAttr("dbt_cloud_environment.test", "project_id", "123"),
+        resource.TestCheckResourceAttr("dbt_cloud_environment.test", "dbt_version", "0.21.0"),
+        resource.TestCheckResourceAttr("dbt_cloud_environment.test", "type", "deployment"),
+        resource.TestCheckResourceAttr("dbt_cloud_environment.test", "use_custom_branch", "true"),
+        resource.TestCheckResourceAttr("dbt_cloud_environment.test", "custom_branch", "dev"),
+    )
+
+    resource.ParallelTest(t, resource.TestCase{
+        Providers: providers(),
+        Steps: []resource.TestStep{
+            {
+                Config: config,
+                Check:  check,
+            },
+        },
+    })
+}
diff --git a/pkg/resources/job.go b/pkg/resources/job.go
index d461de85..17a827e9 100644
--- a/pkg/resources/job.go
+++ b/pkg/resources/job.go
@@ -241,7 +241,7 @@ func resourceJobDelete(ctx context.Context, d *schema.ResourceData, m interface{
         return diag.FromErr(err)
     }
 
-    job.State = 2
+    job.State = dbt_cloud.STATE_DELETED
     _, err = c.UpdateJob(jobId, *job)
     if err != nil {
         return diag.FromErr(err)
diff --git a/pkg/resources/project.go b/pkg/resources/project.go
index 3de8775f..ed6d158a 100644
--- a/pkg/resources/project.go
+++ b/pkg/resources/project.go
@@ -143,7 +143,7 @@ func resourceProjectDelete(ctx context.Context, d *schema.ResourceData, m interf
         return diag.FromErr(err)
     }
 
-    project.State = 2
+    project.State = dbt_cloud.STATE_DELETED
     _, err = c.UpdateProject(projectID, *project)
     if err != nil {
         return diag.FromErr(err)
diff --git a/pkg/resources/snowflake_credential.go b/pkg/resources/snowflake_credential.go
new file mode 100644
index 00000000..0c83fe47
--- /dev/null
+++ b/pkg/resources/snowflake_credential.go
@@ -0,0 +1,236 @@
+package resources
+
+import (
+    "context"
+    "fmt"
+    "strconv"
+    "strings"
+
+    "github.com/gthesheep/terraform-provider-dbt-cloud/pkg/dbt_cloud"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+func ResourceSnowflakeCredential() *schema.Resource {
+    return &schema.Resource{
+        CreateContext: resourceSnowflakeCredentialCreate,
+        ReadContext:   resourceSnowflakeCredentialRead,
+        UpdateContext: resourceSnowflakeCredentialUpdate,
+        DeleteContext: resourceSnowflakeCredentialDelete,
+
+        Schema: map[string]*schema.Schema{
+            "is_active": &schema.Schema{
+                Type:        schema.TypeBool,
+                Optional:    true,
+                Default:     true,
+                Description: "Whether the Snowflake credential is active",
+            },
+            "project_id": &schema.Schema{
+                Type:        schema.TypeInt,
+                Required:    true,
+                Description: "Project ID to create the Snowflake credential in",
+            },
+            "credential_id": &schema.Schema{
+                Type:        schema.TypeInt,
+                Computed:    true,
+                Description: "The system Snowflake credential ID",
+            },
+            "auth_type": &schema.Schema{
+                Type:        schema.TypeString,
+                Required:    true,
+                Description: "The type of Snowflake credential ('password' only currently supported in Terraform)",
+                ValidateFunc: func(val interface{}, key string) (warns []string, errs []error) {
+                    type_ := val.(string)
+                    switch type_ {
+                    case
+                        "password":
+                        return
+                    }
+                    errs = append(errs, fmt.Errorf("%q must be password, got: %q", key, type_))
+                    return
+                },
+            },
+            "schema": &schema.Schema{
+                Type:        schema.TypeString,
+                Required:    true,
+                Description: "Default schema name",
+            },
+            "user": &schema.Schema{
+                Type:        schema.TypeString,
+                Required:    true,
+                Description: "Username for Snowflake",
+            },
+            "password": &schema.Schema{
+                Type:        schema.TypeString,
+                Required:    true,
+                Sensitive:   true,
+                Description: "Password for Snowflake",
+            },
+            "num_threads": &schema.Schema{
+                Type:        schema.TypeInt,
+                Required:    true,
+                Description: "Number of threads to use",
+            },
+
+            // TODO: add private_key and private_key_passphrase
+
+        },
+
+        Importer: &schema.ResourceImporter{
+            StateContext: schema.ImportStatePassthroughContext,
+        },
+    }
+}
+
+func resourceSnowflakeCredentialCreate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    // Warning or errors can be collected in a slice type
+    var diags diag.Diagnostics
+
+    isActive := d.Get("is_active").(bool)
+    projectId := d.Get("project_id").(int)
+    authType := d.Get("auth_type").(string)
+    schema := d.Get("schema").(string)
+    user := d.Get("user").(string)
+    password := d.Get("password").(string)
+    numThreads := d.Get("num_threads").(int)
+
+    snowflakeCredential, err := c.CreateSnowflakeCredential(projectId, "snowflake", isActive, schema, user, password, authType, numThreads)
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    d.SetId(fmt.Sprintf("%d%s%d", snowflakeCredential.Project_Id, dbt_cloud.ID_DELIMITER, *snowflakeCredential.ID))
+
+    resourceSnowflakeCredentialRead(ctx, d, m)
+
+    return diags
+}
+
+func resourceSnowflakeCredentialRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    // Warning or errors can be collected in a slice type
+    var diags diag.Diagnostics
+
+    projectId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[0])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    snowflakeCredentialId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[1])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    snowflakeCredential, err := c.GetSnowflakeCredential(projectId, snowflakeCredentialId)
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    if err := d.Set("credential_id", snowflakeCredentialId); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("is_active", snowflakeCredential.State == dbt_cloud.STATE_ACTIVE); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("project_id", snowflakeCredential.Project_Id); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("auth_type", snowflakeCredential.Auth_Type); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("schema", snowflakeCredential.Schema); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("user", snowflakeCredential.User); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("password", snowflakeCredential.Password); err != nil {
+        return diag.FromErr(err)
+    }
+    if err := d.Set("num_threads", snowflakeCredential.Threads); err != nil {
+        return diag.FromErr(err)
+    }
+
+    return diags
+}
+
+func resourceSnowflakeCredentialUpdate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+    c := m.(*dbt_cloud.Client)
+
+    projectId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[0])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    snowflakeCredentialId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[1])
+    if err != nil {
+        return diag.FromErr(err)
+    }
+
+    if d.HasChange("auth_type") || d.HasChange("schema") || d.HasChange("user") || d.HasChange("password") || d.HasChange("num_threads") {
+        snowflakeCredential, err := c.GetSnowflakeCredential(projectId, snowflakeCredentialId)
+        if err != nil {
+            return diag.FromErr(err)
+        }
+
+        if d.HasChange("auth_type") {
+            authType := d.Get("auth_type").(string)
+            snowflakeCredential.Auth_Type = authType
+        }
+        if d.HasChange("schema") {
+            schema := 
d.Get("schema").(string) + snowflakeCredential.Schema = schema + } + if d.HasChange("user") { + user := d.Get("user").(string) + snowflakeCredential.User = user + } + if d.HasChange("password") { + password := d.Get("password").(string) + snowflakeCredential.Password = &password + } + if d.HasChange("num_threads") { + numThreads := d.Get("num_threads").(int) + snowflakeCredential.Threads = numThreads + } + + _, err = c.UpdateSnowflakeCredential(projectId, snowflakeCredentialId, *snowflakeCredential) + if err != nil { + return diag.FromErr(err) + } + } + + return resourceSnowflakeCredentialRead(ctx, d, m) +} + +func resourceSnowflakeCredentialDelete(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { + c := m.(*dbt_cloud.Client) + + var diags diag.Diagnostics + + projectId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[0]) + if err != nil { + return diag.FromErr(err) + } + + snowflakeCredentialId, err := strconv.Atoi(strings.Split(d.Id(), dbt_cloud.ID_DELIMITER)[1]) + if err != nil { + return diag.FromErr(err) + } + + snowflakeCredential, err := c.GetSnowflakeCredential(projectId, snowflakeCredentialId) + if err != nil { + return diag.FromErr(err) + } + + snowflakeCredential.State = dbt_cloud.STATE_DELETED + _, err = c.UpdateSnowflakeCredential(projectId, snowflakeCredentialId, *snowflakeCredential) + if err != nil { + return diag.FromErr(err) + } + + return diags +} diff --git a/pkg/resources/snowflake_credential_test.go b/pkg/resources/snowflake_credential_test.go new file mode 100644 index 00000000..7dcfb9d2 --- /dev/null +++ b/pkg/resources/snowflake_credential_test.go @@ -0,0 +1,44 @@ +package resources_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDbtCloudSnowflakeCredentialResource(t *testing.T) { + + config := fmt.Sprintf(` + resource "dbt_cloud_snowflake_credential" "test" { + is_active = true + project_id = 123 + auth_type = "password" + schema = "moo" + user = "test_user" + password = "test-password" + num_threads = 3 + } + `) + + check := resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet("dbt_cloud_snowflake_credential.test", "credential_id"), + resource.TestCheckResourceAttr("dbt_cloud_snowflake_credential.test", "is_active", "true"), + resource.TestCheckResourceAttr("dbt_cloud_snowflake_credential.test", "project_id", "123"), + resource.TestCheckResourceAttr("dbt_cloud_snowflake_credential.test", "auth_type", "password"), + resource.TestCheckResourceAttr("dbt_cloud_snowflake_credential.test", "schema", "moo"), + resource.TestCheckResourceAttr("dbt_cloud_snowflake_credential.test", "user", "test_user"), + resource.TestCheckResourceAttr("dbt_cloud_snowflake_credential.test", "password", "test-password"), + resource.TestCheckResourceAttr("dbt_cloud_snowflake_credential.test", "num_threads", "3"), + ) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: config, + Check: check, + }, + }, + }) +}
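Reviewer note, not part of the diff: a minimal usage sketch of the new resources and data source, wiring a Snowflake credential into a deployment environment. The project ID 123 and the Snowflake values are illustrative placeholders mirroring the acceptance tests above, not real IDs.

```terraform
variable "snowflake_password" {
  type      = string
  sensitive = true
}

# Placeholder project ID (123), matching the acceptance tests above.
resource "dbt_cloud_snowflake_credential" "prod" {
  project_id  = 123
  auth_type   = "password"
  schema      = "analytics"
  user        = "dbt_user"
  password    = var.snowflake_password
  num_threads = 4
}

resource "dbt_cloud_environment" "prod" {
  project_id    = 123
  name          = "Production"
  dbt_version   = "0.21.0"
  type          = "deployment"
  credential_id = dbt_cloud_snowflake_credential.prod.credential_id
}

# Reading the credential back through the new data source.
data "dbt_cloud_snowflake_credential" "prod" {
  project_id    = 123
  credential_id = dbt_cloud_snowflake_credential.prod.credential_id
}
```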