From d544da56161f39e552f8e5aa5c965de07b33a170 Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Fri, 3 May 2024 02:39:36 -0400 Subject: [PATCH 01/11] Fix lints by golangci-lint and revive --- pagerduty/resource_pagerduty_addon_test.go | 140 ------------------ pagerdutyplugin/config.go | 18 +-- pagerdutyplugin/config_test.go | 6 +- .../data_source_pagerduty_business_service.go | 6 +- .../data_source_pagerduty_extension_schema.go | 6 +- .../data_source_pagerduty_standards.go | 12 +- ...rce_pagerduty_standards_resource_scores.go | 6 +- ...ce_pagerduty_standards_resources_scores.go | 6 +- .../data_source_pagerduty_standards_test.go | 6 +- pagerdutyplugin/data_source_pagerduty_tag.go | 12 +- ...ort_pagerduty_extension_servicenow_test.go | 4 +- .../import_pagerduty_extension_test.go | 4 +- pagerdutyplugin/provider.go | 34 ++--- .../resource_pagerduty_business_service.go | 4 +- .../resource_pagerduty_extension.go | 2 +- ...resource_pagerduty_extension_servicenow.go | 8 +- ...rce_pagerduty_extension_servicenow_test.go | 20 +-- .../resource_pagerduty_extension_test.go | 22 +-- pagerdutyplugin/resource_pagerduty_tag.go | 12 +- .../resource_pagerduty_tag_assignment.go | 4 +- .../resource_pagerduty_tag_assignment_test.go | 2 +- .../resource_pagerduty_tag_test.go | 2 +- 22 files changed, 97 insertions(+), 239 deletions(-) delete mode 100644 pagerduty/resource_pagerduty_addon_test.go diff --git a/pagerduty/resource_pagerduty_addon_test.go b/pagerduty/resource_pagerduty_addon_test.go deleted file mode 100644 index 1cbe5149f..000000000 --- a/pagerduty/resource_pagerduty_addon_test.go +++ /dev/null @@ -1,140 +0,0 @@ -package pagerduty - -import ( - "fmt" - "log" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/heimweh/go-pagerduty/pagerduty" -) - -func init() { - resource.AddTestSweepers("pagerduty_addon", &resource.Sweeper{ - Name: "pagerduty_addon", - F: testSweepAddon, - }) -} - -func testSweepAddon(region string) error { - config, err := sharedConfigForRegion(region) - if err != nil { - return err - } - - client, err := config.Client() - if err != nil { - return err - } - - resp, _, err := client.Addons.List(&pagerduty.ListAddonsOptions{}) - if err != nil { - return err - } - - for _, addon := range resp.Addons { - if strings.HasPrefix(addon.Name, "test") || strings.HasPrefix(addon.Name, "tf-") { - log.Printf("Destroying add-on %s (%s)", addon.Name, addon.ID) - if _, err := client.Addons.Delete(addon.ID); err != nil { - return err - } - } - } - - return nil -} - -func TestAccPagerDutyAddon_Basic(t *testing.T) { - addon := fmt.Sprintf("tf-%s", acctest.RandString(5)) - addonUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyAddonDestroy, - Steps: []resource.TestStep{ - { - Config: testAccCheckPagerDutyAddonConfig(addon), - Check: resource.ComposeTestCheckFunc( - testAccCheckPagerDutyAddonExists("pagerduty_addon.foo"), - resource.TestCheckResourceAttr( - "pagerduty_addon.foo", "name", addon), - resource.TestCheckResourceAttr( - "pagerduty_addon.foo", "src", "https://intranet.foo.test/status"), - ), - }, - { - Config: testAccCheckPagerDutyAddonConfigUpdated(addonUpdated), - Check: resource.ComposeTestCheckFunc( - testAccCheckPagerDutyAddonExists("pagerduty_addon.foo"), - 
resource.TestCheckResourceAttr( - "pagerduty_addon.foo", "name", addonUpdated), - resource.TestCheckResourceAttr( - "pagerduty_addon.foo", "src", "https://intranet.bar.com/status"), - ), - }, - }, - }) -} - -func testAccCheckPagerDutyAddonDestroy(s *terraform.State) error { - client, _ := testAccProvider.Meta().(*Config).Client() - for _, r := range s.RootModule().Resources { - if r.Type != "pagerduty_addon" { - continue - } - - if _, _, err := client.Addons.Get(r.Primary.ID); err == nil { - return fmt.Errorf("Add-on still exists") - } - - } - return nil -} - -func testAccCheckPagerDutyAddonExists(n string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[n] - if !ok { - return fmt.Errorf("Not found: %s", n) - } - - if rs.Primary.ID == "" { - return fmt.Errorf("No add-on ID is set") - } - - client, _ := testAccProvider.Meta().(*Config).Client() - - found, _, err := client.Addons.Get(rs.Primary.ID) - if err != nil { - return err - } - - if found.ID != rs.Primary.ID { - return fmt.Errorf("Add-on not found: %v - %v", rs.Primary.ID, found) - } - - return nil - } -} - -func testAccCheckPagerDutyAddonConfig(addon string) string { - return fmt.Sprintf(` -resource "pagerduty_addon" "foo" { - name = "%s" - src = "https://intranet.foo.test/status" -} -`, addon) -} - -func testAccCheckPagerDutyAddonConfigUpdated(addon string) string { - return fmt.Sprintf(` -resource "pagerduty_addon" "foo" { - name = "%s" - src = "https://intranet.bar.com/status" -} -`, addon) -} diff --git a/pagerdutyplugin/config.go b/pagerdutyplugin/config.go index 48b05a470..b2f11d0bc 100644 --- a/pagerdutyplugin/config.go +++ b/pagerdutyplugin/config.go @@ -21,13 +21,13 @@ type Config struct { mu sync.Mutex // The PagerDuty API URL - ApiUrl string + APIURL string // Override default PagerDuty API URL - ApiUrlOverride string + APIURLOverride string // The PagerDuty APP URL - AppUrl string + AppURL string // The PagerDuty API V2 token Token string @@ -52,7 +52,7 @@ type Config struct { } type AppOauthScopedToken struct { - ClientId, ClientSecret, Subdomain string + ClientID, ClientSecret, Subdomain string } const invalidCreds = ` @@ -75,9 +75,9 @@ func (c *Config) Client(ctx context.Context) (*pagerduty.Client, error) { httpClient.Timeout = 1 * time.Minute httpClient.Transport = logging.NewTransport("PagerDuty", http.DefaultTransport) - apiUrl := c.ApiUrl - if c.ApiUrlOverride != "" { - apiUrl = c.ApiUrlOverride + apiURL := c.APIURL + if c.APIURLOverride != "" { + apiURL = c.APIURLOverride } maxRetries := 1 @@ -85,7 +85,7 @@ func (c *Config) Client(ctx context.Context) (*pagerduty.Client, error) { clientOpts := []pagerduty.ClientOptions{ WithHTTPClient(httpClient), - pagerduty.WithAPIEndpoint(apiUrl), + pagerduty.WithAPIEndpoint(apiURL), pagerduty.WithTerraformProvider(c.TerraformVersion), pagerduty.WithRetryPolicy(maxRetries, retryInterval), } @@ -97,7 +97,7 @@ func (c *Config) Client(ctx context.Context) (*pagerduty.Client, error) { accountAndScopes = append(accountAndScopes, availableOauthScopes()...) 
opt := pagerduty.WithScopedOAuthAppTokenSource(pagerduty.NewFileTokenSource( ctx, - c.AppOauthScopedToken.ClientId, + c.AppOauthScopedToken.ClientID, c.AppOauthScopedToken.ClientSecret, accountAndScopes, tokenFile, diff --git a/pagerdutyplugin/config_test.go b/pagerdutyplugin/config_test.go index e199dbcb6..00450676a 100644 --- a/pagerdutyplugin/config_test.go +++ b/pagerdutyplugin/config_test.go @@ -32,7 +32,7 @@ func TestConfigSkipCredsValidation(t *testing.T) { func TestConfigCustomApiUrl(t *testing.T) { config := Config{ Token: "foo", - ApiUrl: "https://api.domain.tld", + APIURL: "https://api.domain.tld", SkipCredsValidation: true, } @@ -45,7 +45,7 @@ func TestConfigCustomApiUrl(t *testing.T) { func TestConfigCustomApiUrlOverride(t *testing.T) { config := Config{ Token: "foo", - ApiUrlOverride: "https://api.domain-override.tld", + APIURLOverride: "https://api.domain-override.tld", SkipCredsValidation: true, } @@ -58,7 +58,7 @@ func TestConfigCustomApiUrlOverride(t *testing.T) { func TestConfigCustomAppUrl(t *testing.T) { config := Config{ Token: "foo", - AppUrl: "https://app.domain.tld", + AppURL: "https://app.domain.tld", SkipCredsValidation: true, } diff --git a/pagerdutyplugin/data_source_pagerduty_business_service.go b/pagerdutyplugin/data_source_pagerduty_business_service.go index 81958235f..84e60d61c 100644 --- a/pagerdutyplugin/data_source_pagerduty_business_service.go +++ b/pagerdutyplugin/data_source_pagerduty_business_service.go @@ -19,11 +19,11 @@ type dataSourceBusinessService struct{ client *pagerduty.Client } var _ datasource.DataSourceWithConfigure = (*dataSourceBusinessService)(nil) -func (*dataSourceBusinessService) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (*dataSourceBusinessService) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_business_service" } -func (*dataSourceBusinessService) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (*dataSourceBusinessService) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{Computed: true}, @@ -33,7 +33,7 @@ func (*dataSourceBusinessService) Schema(ctx context.Context, req datasource.Sch } } -func (d *dataSourceBusinessService) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceBusinessService) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
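Zooming out on the Config/Client changes in pagerdutyplugin/config.go above (ApiUrl → APIURL, ClientId → ClientID): a minimal caller sketch, assuming it sits in the same package as Config so the type and its Client method are in scope; imports for context, os and the go-pagerduty client are assumed. The field names, the Client signature, and the PAGERDUTY_CLIENT_ID / PAGERDUTY_CLIENT_SECRET environment variables come from this patch, while the helper name, subdomain literal and surrounding wiring are invented for illustration.

// Illustrative only; not part of this patch.
func newScopedClient(ctx context.Context) (*pagerduty.Client, error) {
	cfg := &Config{
		APIURL:              "https://api.pagerduty.com", // renamed from ApiUrl
		SkipCredsValidation: true,                        // keep the sketch independent of live credentials
		AppOauthScopedToken: &AppOauthScopedToken{
			ClientID:     os.Getenv("PAGERDUTY_CLIENT_ID"), // renamed from ClientId
			ClientSecret: os.Getenv("PAGERDUTY_CLIENT_SECRET"),
			Subdomain:    "acme", // hypothetical subdomain
		},
	}
	return cfg.Client(ctx)
}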
} diff --git a/pagerdutyplugin/data_source_pagerduty_extension_schema.go b/pagerdutyplugin/data_source_pagerduty_extension_schema.go index 3f0db9197..481e41947 100644 --- a/pagerdutyplugin/data_source_pagerduty_extension_schema.go +++ b/pagerdutyplugin/data_source_pagerduty_extension_schema.go @@ -20,11 +20,11 @@ type dataSourceExtensionSchema struct{ client *pagerduty.Client } var _ datasource.DataSourceWithConfigure = (*dataSourceExtensionSchema)(nil) -func (*dataSourceExtensionSchema) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (*dataSourceExtensionSchema) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_extension_schema" } -func (*dataSourceExtensionSchema) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (*dataSourceExtensionSchema) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{Computed: true}, @@ -34,7 +34,7 @@ func (*dataSourceExtensionSchema) Schema(ctx context.Context, req datasource.Sch } } -func (d *dataSourceExtensionSchema) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceExtensionSchema) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) } diff --git a/pagerdutyplugin/data_source_pagerduty_standards.go b/pagerdutyplugin/data_source_pagerduty_standards.go index 628195370..6a0815d18 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards.go +++ b/pagerdutyplugin/data_source_pagerduty_standards.go @@ -17,11 +17,11 @@ type dataSourceStandards struct { var _ datasource.DataSourceWithConfigure = (*dataSourceStandards)(nil) -func (d *dataSourceStandards) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceStandards) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_standards" } -func (d *dataSourceStandards) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceStandards) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "resource_type": schema.StringAttribute{Optional: true}, @@ -33,6 +33,10 @@ func (d *dataSourceStandards) Schema(ctx context.Context, req datasource.SchemaR } } +func (d *dataSourceStandards) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) +} + func (d *dataSourceStandards) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var data dataSourceStandardsModel resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) @@ -54,10 +58,6 @@ func (d *dataSourceStandards) Read(ctx context.Context, req datasource.ReadReque resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
} -func (d *dataSourceStandards) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) -} - func flattenStandards(ctx context.Context, list []pagerduty.Standard) (types.List, diag.Diagnostics) { var diagnostics diag.Diagnostics mapList := make([]types.Object, 0, len(list)) diff --git a/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go b/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go index dcb70a59b..c3e4fd733 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go +++ b/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go @@ -19,11 +19,11 @@ type dataSourceStandardsResourceScores struct { var _ datasource.DataSource = (*dataSourceStandardsResourceScores)(nil) -func (d *dataSourceStandardsResourceScores) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceStandardsResourceScores) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_standards_resource_scores" } -func (d *dataSourceStandardsResourceScores) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceStandardsResourceScores) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{Required: true}, @@ -45,7 +45,7 @@ func (d *dataSourceStandardsResourceScores) Schema(ctx context.Context, req data } } -func (d *dataSourceStandardsResourceScores) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceStandardsResourceScores) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
} diff --git a/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go b/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go index 8368c9fd4..967113256 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go +++ b/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go @@ -19,11 +19,11 @@ type dataSourceStandardsResourcesScores struct { var _ datasource.DataSource = (*dataSourceStandardsResourcesScores)(nil) -func (d *dataSourceStandardsResourcesScores) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceStandardsResourcesScores) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_standards_resources_scores" } -func (d *dataSourceStandardsResourcesScores) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceStandardsResourcesScores) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "ids": schema.ListAttribute{ @@ -44,7 +44,7 @@ func (d *dataSourceStandardsResourcesScores) Schema(ctx context.Context, req dat } } -func (d *dataSourceStandardsResourcesScores) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceStandardsResourcesScores) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) } diff --git a/pagerdutyplugin/data_source_pagerduty_standards_test.go b/pagerdutyplugin/data_source_pagerduty_standards_test.go index 436837431..bd369624a 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards_test.go +++ b/pagerdutyplugin/data_source_pagerduty_standards_test.go @@ -63,9 +63,9 @@ func testStandards(a map[string]string) error { } for _, att := range testAttrs { - required_sub_attr := fmt.Sprintf("standards.0.%s", att) - if _, ok := a[required_sub_attr]; !ok { - return fmt.Errorf("Expected the required attribute %s to exist", required_sub_attr) + requiredSubAttr := fmt.Sprintf("standards.0.%s", att) + if _, ok := a[requiredSubAttr]; !ok { + return fmt.Errorf("Expected the required attribute %s to exist", requiredSubAttr) } } diff --git a/pagerdutyplugin/data_source_pagerduty_tag.go b/pagerdutyplugin/data_source_pagerduty_tag.go index f162bb49c..b93a36c6a 100644 --- a/pagerdutyplugin/data_source_pagerduty_tag.go +++ b/pagerdutyplugin/data_source_pagerduty_tag.go @@ -21,15 +21,11 @@ type dataSourceTag struct { var _ datasource.DataSourceWithConfigure = (*dataSourceStandards)(nil) -func (d *dataSourceTag) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
-} - -func (d *dataSourceTag) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceTag) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_tag" } -func (d *dataSourceTag) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceTag) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "label": schema.StringAttribute{ @@ -41,6 +37,10 @@ func (d *dataSourceTag) Schema(ctx context.Context, req datasource.SchemaRequest } } +func (d *dataSourceTag) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) +} + func (d *dataSourceTag) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var searchTag string if d := req.Config.GetAttribute(ctx, path.Root("label"), &searchTag); d.HasError() { diff --git a/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go b/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go index 3fa273127..ea330df14 100644 --- a/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go +++ b/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go @@ -9,7 +9,7 @@ import ( ) func TestAccPagerDutyExtensionServiceNow_import(t *testing.T) { - extension_name := fmt.Sprintf("tf-%s", acctest.RandString(5)) + extensionName := fmt.Sprintf("tf-%s", acctest.RandString(5)) name := fmt.Sprintf("tf-%s", acctest.RandString(5)) url := "https://example.com/receive_a_pagerduty_webhook" @@ -19,7 +19,7 @@ func TestAccPagerDutyExtensionServiceNow_import(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionServiceNowDestroy, Steps: []resource.TestStep{ { - Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extensionName, url, "false", "any"), }, { ResourceName: "pagerduty_extension_servicenow.foo", diff --git a/pagerdutyplugin/import_pagerduty_extension_test.go b/pagerdutyplugin/import_pagerduty_extension_test.go index 52095a1f9..1c0a7191c 100644 --- a/pagerdutyplugin/import_pagerduty_extension_test.go +++ b/pagerdutyplugin/import_pagerduty_extension_test.go @@ -9,7 +9,7 @@ import ( ) func TestAccPagerDutyExtension_import(t *testing.T) { - extension_name := fmt.Sprintf("tf-%s", acctest.RandString(5)) + extensionName := fmt.Sprintf("tf-%s", acctest.RandString(5)) name := fmt.Sprintf("tf-%s", acctest.RandString(5)) url := "https://example.com/receive_a_pagerduty_webhook" @@ -19,7 +19,7 @@ func TestAccPagerDutyExtension_import(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionDestroy, Steps: []resource.TestStep{ { - Config: testAccCheckPagerDutyExtensionConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionConfig(name, extensionName, url, "false", "any"), }, { ResourceName: "pagerduty_extension.foo", diff --git a/pagerdutyplugin/provider.go b/pagerdutyplugin/provider.go index 59e3c2414..06a6852db 100644 --- a/pagerdutyplugin/provider.go +++ b/pagerdutyplugin/provider.go @@ -21,11 +21,11 @@ type Provider struct { client *pagerduty.Client } -func (p *Provider) Metadata(ctx context.Context, req provider.MetadataRequest, resp *provider.MetadataResponse) { +func (p 
*Provider) Metadata(_ context.Context, _ provider.MetadataRequest, resp *provider.MetadataResponse) { resp.TypeName = "pagerduty" } -func (p *Provider) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) { +func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) { useAppOauthScopedTokenBlock := schema.ListNestedBlock{ NestedObject: schema.NestedBlockObject{ Attributes: map[string]schema.Attribute{ @@ -49,7 +49,7 @@ func (p *Provider) Schema(ctx context.Context, req provider.SchemaRequest, resp } } -func (p *Provider) DataSources(ctx context.Context) [](func() datasource.DataSource) { +func (p *Provider) DataSources(_ context.Context) [](func() datasource.DataSource) { return [](func() datasource.DataSource){ func() datasource.DataSource { return &dataSourceBusinessService{} }, func() datasource.DataSource { return &dataSourceExtensionSchema{} }, @@ -60,7 +60,7 @@ func (p *Provider) DataSources(ctx context.Context) [](func() datasource.DataSou } } -func (p *Provider) Resources(ctx context.Context) [](func() resource.Resource) { +func (p *Provider) Resources(_ context.Context) [](func() resource.Resource) { return [](func() resource.Resource){ func() resource.Resource { return &resourceBusinessService{} }, func() resource.Resource { return &resourceExtensionServiceNow{} }, @@ -90,23 +90,21 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, } } - var regionApiUrl string - if serviceRegion == "us" { - regionApiUrl = "" - } else { - regionApiUrl = serviceRegion + "." + regionAPIURL := "" + if serviceRegion != "us" { + regionAPIURL = serviceRegion + "." } skipCredentialsValidation := args.SkipCredentialsValidation.Equal(types.BoolValue(true)) config := Config{ - ApiUrl: "https://api." + regionApiUrl + "pagerduty.com", - AppUrl: "https://app." + regionApiUrl + "pagerduty.com", + APIURL: "https://api." + regionAPIURL + "pagerduty.com", + AppURL: "https://app." + regionAPIURL + "pagerduty.com", SkipCredsValidation: skipCredentialsValidation, Token: args.Token.ValueString(), UserToken: args.UserToken.ValueString(), TerraformVersion: req.TerraformVersion, - ApiUrlOverride: args.ApiUrlOverride.ValueString(), + APIURLOverride: args.APIURLOverride.ValueString(), ServiceRegion: serviceRegion, } @@ -117,7 +115,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, return } config.AppOauthScopedToken = &AppOauthScopedToken{ - ClientId: blockList[0].PdClientId.ValueString(), + ClientID: blockList[0].PdClientID.ValueString(), ClientSecret: blockList[0].PdClientSecret.ValueString(), Subdomain: blockList[0].PdSubdomain.ValueString(), } @@ -131,8 +129,8 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, config.UserToken = os.Getenv("PAGERDUTY_USER_TOKEN") } } else { - if config.AppOauthScopedToken.ClientId == "" { - config.AppOauthScopedToken.ClientId = os.Getenv("PAGERDUTY_CLIENT_ID") + if config.AppOauthScopedToken.ClientID == "" { + config.AppOauthScopedToken.ClientID = os.Getenv("PAGERDUTY_CLIENT_ID") } if config.AppOauthScopedToken.ClientSecret == "" { config.AppOauthScopedToken.ClientSecret = os.Getenv("PAGERDUTY_CLIENT_SECRET") @@ -149,7 +147,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, // We had to define pd_client_id, pd_client_secret, and pd_subdomain // as Optional and manually check its presence here. 
li := []string{} - if config.AppOauthScopedToken.ClientId == "" { + if config.AppOauthScopedToken.ClientID == "" { li = append(li, "pd_client_id") } if config.AppOauthScopedToken.ClientSecret == "" { @@ -179,7 +177,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, } type UseAppOauthScopedToken struct { - PdClientId types.String `tfsdk:"pd_client_id"` + PdClientID types.String `tfsdk:"pd_client_id"` PdClientSecret types.String `tfsdk:"pd_client_secret"` PdSubdomain types.String `tfsdk:"pd_subdomain"` } @@ -189,7 +187,7 @@ type providerArguments struct { UserToken types.String `tfsdk:"user_token"` SkipCredentialsValidation types.Bool `tfsdk:"skip_credentials_validation"` ServiceRegion types.String `tfsdk:"service_region"` - ApiUrlOverride types.String `tfsdk:"api_url_override"` + APIURLOverride types.String `tfsdk:"api_url_override"` UseAppOauthScopedToken types.List `tfsdk:"use_app_oauth_scoped_token"` } diff --git a/pagerdutyplugin/resource_pagerduty_business_service.go b/pagerdutyplugin/resource_pagerduty_business_service.go index 7328be1f4..4b23c54b3 100644 --- a/pagerdutyplugin/resource_pagerduty_business_service.go +++ b/pagerdutyplugin/resource_pagerduty_business_service.go @@ -29,7 +29,7 @@ var ( _ resource.ResourceWithImportState = (*resourceBusinessService)(nil) ) -func (r *resourceBusinessService) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *resourceBusinessService) Metadata(_ context.Context, _ resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = "pagerduty_business_service" } @@ -161,7 +161,7 @@ func (r *resourceBusinessService) Delete(ctx context.Context, req resource.Delet resp.State.RemoveResource(ctx) } -func (r *resourceBusinessService) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceBusinessService) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) } diff --git a/pagerdutyplugin/resource_pagerduty_extension.go b/pagerdutyplugin/resource_pagerduty_extension.go index 476653e0f..14ab5d597 100644 --- a/pagerdutyplugin/resource_pagerduty_extension.go +++ b/pagerdutyplugin/resource_pagerduty_extension.go @@ -187,7 +187,7 @@ func (r *resourceExtension) Delete(ctx context.Context, req resource.DeleteReque resp.State.RemoveResource(ctx) } -func (r *resourceExtension) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceExtension) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) 
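The service-region handling rewritten in the provider Configure method above reduces to a small pure function. A standalone sketch of the same logic (the helper name is invented; the behavior mirrors the diff): "us" maps to the default domain, any other region becomes a subdomain prefix.

// Hypothetical helper, equivalent to the regionAPIURL logic above.
func regionBaseURL(serviceRegion string) string {
	regionAPIURL := ""
	if serviceRegion != "us" {
		regionAPIURL = serviceRegion + "."
	}
	return "https://api." + regionAPIURL + "pagerduty.com"
}

// regionBaseURL("us") => "https://api.pagerduty.com"
// regionBaseURL("eu") => "https://api.eu.pagerduty.com"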
} diff --git a/pagerdutyplugin/resource_pagerduty_extension_servicenow.go b/pagerdutyplugin/resource_pagerduty_extension_servicenow.go index 7abf7e4e8..a5c8997e0 100644 --- a/pagerdutyplugin/resource_pagerduty_extension_servicenow.go +++ b/pagerdutyplugin/resource_pagerduty_extension_servicenow.go @@ -194,7 +194,7 @@ func (r *resourceExtensionServiceNow) Delete(ctx context.Context, req resource.D resp.State.RemoveResource(ctx) } -func (r *resourceExtensionServiceNow) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceExtensionServiceNow) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) } @@ -266,7 +266,7 @@ func (r *resourceExtensionServiceNow) requestGetExtensionServiceNow(ctx context. } func buildPagerdutyExtensionServiceNow(ctx context.Context, model *resourceExtensionServiceNowModel, diags *diag.Diagnostics) *pagerduty.Extension { - config := &PagerDutyExtensionServiceNowConfig{ + config := &pagerDutyExtensionServiceNowConfig{ User: model.SnowUser.ValueString(), Password: model.SnowPassword.ValueString(), SyncOptions: model.SyncOptions.ValueString(), @@ -313,7 +313,7 @@ func flattenExtensionServiceNow(src *pagerduty.Extension, snowPassword *string, } b, _ := json.Marshal(src.Config) - var config PagerDutyExtensionServiceNowConfig + var config pagerDutyExtensionServiceNowConfig _ = json.Unmarshal(b, &config) model.SnowUser = types.StringValue(config.User) @@ -344,7 +344,7 @@ func flattenExtensionServiceNowObjects(list []pagerduty.APIObject) types.Set { return types.SetValueMust(types.StringType, elements) } -type PagerDutyExtensionServiceNowConfig struct { +type pagerDutyExtensionServiceNowConfig struct { User string `json:"snow_user"` Password string `json:"snow_password,omitempty"` SyncOptions string `json:"sync_options"` diff --git a/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go b/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go index 9dbe6e0cd..c7142b1ed 100644 --- a/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go +++ b/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go @@ -41,11 +41,11 @@ func testSweepExtensionServiceNow(_ string) error { } func TestAccPagerDutyExtensionServiceNow_Basic(t *testing.T) { - extension_name := id.PrefixedUniqueId("tf-") - extension_name_updated := id.PrefixedUniqueId("tf-") + extensionName := id.PrefixedUniqueId("tf-") + extensionNameUpdated := id.PrefixedUniqueId("tf-") name := id.PrefixedUniqueId("tf-") url := "https://example.com/receive_a_pagerduty_webhook" - url_updated := "https://example.com/webhook_foo" + urlUpdated := "https://example.com/webhook_foo" resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -53,11 +53,11 @@ func TestAccPagerDutyExtensionServiceNow_Basic(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionServiceNowDestroy, Steps: []resource.TestStep{ { - Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extensionName, url, "false", "any"), Check: resource.ComposeTestCheckFunc( testAccCheckPagerDutyExtensionServiceNowExists("pagerduty_extension_servicenow.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension_servicenow.foo", "name", extension_name), + "pagerduty_extension_servicenow.foo", "name", extensionName), 
resource.TestCheckResourceAttr( "pagerduty_extension_servicenow.foo", "extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( @@ -79,15 +79,15 @@ func TestAccPagerDutyExtensionServiceNow_Basic(t *testing.T) { ), }, { - Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extension_name_updated, url_updated, "true", "pd-users"), + Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extensionNameUpdated, urlUpdated, "true", "pd-users"), Check: resource.ComposeTestCheckFunc( testAccCheckPagerDutyExtensionServiceNowExists("pagerduty_extension_servicenow.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension_servicenow.foo", "name", extension_name_updated), + "pagerduty_extension_servicenow.foo", "name", extensionNameUpdated), resource.TestCheckResourceAttr( "pagerduty_extension_servicenow.foo", "extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( - "pagerduty_extension_servicenow.foo", "endpoint_url", url_updated), + "pagerduty_extension_servicenow.foo", "endpoint_url", urlUpdated), resource.TestCheckResourceAttr( "pagerduty_extension_servicenow.foo", "html_url", ""), resource.TestCheckResourceAttr( @@ -150,7 +150,7 @@ func testAccCheckPagerDutyExtensionServiceNowExists(n string) resource.TestCheck } } -func testAccCheckPagerDutyExtensionServiceNowConfig(name string, extension_name string, url string, notify_types string, restrict string) string { +func testAccCheckPagerDutyExtensionServiceNowConfig(name string, extensionName string, url string, _ string, _ string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { name = "%[1]v" @@ -206,5 +206,5 @@ resource "pagerduty_extension_servicenow" "foo"{ referer = "None" } -`, name, extension_name, url, restrict, notify_types) +`, name, extensionName, url) } diff --git a/pagerdutyplugin/resource_pagerduty_extension_test.go b/pagerdutyplugin/resource_pagerduty_extension_test.go index c63934d5d..08c0d9cf7 100644 --- a/pagerdutyplugin/resource_pagerduty_extension_test.go +++ b/pagerdutyplugin/resource_pagerduty_extension_test.go @@ -21,7 +21,7 @@ func init() { }) } -func testSweepExtension(region string) error { +func testSweepExtension(_ string) error { ctx := context.Background() resp, err := testAccProvider.client.ListExtensionsWithContext(ctx, pagerduty.ListExtensionOptions{}) @@ -42,11 +42,11 @@ func testSweepExtension(region string) error { } func TestAccPagerDutyExtension_Basic(t *testing.T) { - extension_name := id.PrefixedUniqueId("tf-") - extension_name_updated := id.PrefixedUniqueId("tf-") + extensionName := id.PrefixedUniqueId("tf-") + extensionNameUpdated := id.PrefixedUniqueId("tf-") name := id.PrefixedUniqueId("tf-") url := "https://example.com/receive_a_pagerduty_webhook" - url_updated := "https://example.com/webhook_foo" + urlUpdated := "https://example.com/webhook_foo" resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -54,11 +54,11 @@ func TestAccPagerDutyExtension_Basic(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionDestroy, Steps: []resource.TestStep{ { - Config: testAccCheckPagerDutyExtensionConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionConfig(name, extensionName, url, "false", "any"), Check: resource.ComposeTestCheckFunc( testAccCheckPagerDutyExtensionExists("pagerduty_extension.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension.foo", "name", extension_name), + "pagerduty_extension.foo", "name", extensionName), resource.TestCheckResourceAttr( "pagerduty_extension.foo", 
"extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( @@ -70,15 +70,15 @@ func TestAccPagerDutyExtension_Basic(t *testing.T) { ), }, { - Config: testAccCheckPagerDutyExtensionConfig(name, extension_name_updated, url_updated, "true", "pd-users"), + Config: testAccCheckPagerDutyExtensionConfig(name, extensionNameUpdated, urlUpdated, "true", "pd-users"), Check: resource.ComposeTestCheckFunc( testAccCheckPagerDutyExtensionExists("pagerduty_extension.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension.foo", "name", extension_name_updated), + "pagerduty_extension.foo", "name", extensionNameUpdated), resource.TestCheckResourceAttr( "pagerduty_extension.foo", "extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( - "pagerduty_extension.foo", "endpoint_url", url_updated), + "pagerduty_extension.foo", "endpoint_url", urlUpdated), resource.TestCheckResourceAttrWith( "pagerduty_extension.foo", "config", util.CheckJSONEqual("{\"notify_types\":{\"acknowledge\":true,\"assignments\":true,\"resolve\":true},\"restrict\":\"pd-users\"}")), ), @@ -127,7 +127,7 @@ func testAccCheckPagerDutyExtensionExists(n string) resource.TestCheckFunc { } } -func testAccCheckPagerDutyExtensionConfig(name string, extension_name string, url string, notify_types string, restrict string) string { +func testAccCheckPagerDutyExtensionConfig(name string, extensionName string, url string, notifyTypes string, restrict string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { name = "%[1]v" @@ -187,5 +187,5 @@ resource "pagerduty_extension" "foo"{ EOF } -`, name, extension_name, url, restrict, notify_types) +`, name, extensionName, url, restrict, notifyTypes) } diff --git a/pagerdutyplugin/resource_pagerduty_tag.go b/pagerdutyplugin/resource_pagerduty_tag.go index 6a9d75f3b..b1a9115ef 100644 --- a/pagerdutyplugin/resource_pagerduty_tag.go +++ b/pagerdutyplugin/resource_pagerduty_tag.go @@ -26,15 +26,15 @@ var ( _ resource.ResourceWithImportState = (*resourceTag)(nil) ) -func (r *resourceTag) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceTag) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) 
} -func (r *resourceTag) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *resourceTag) Metadata(_ context.Context, _ resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = "pagerduty_tag" } -func (r *resourceTag) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { +func (r *resourceTag) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "label": schema.StringAttribute{ @@ -106,7 +106,7 @@ func (r *resourceTag) Read(ctx context.Context, req resource.ReadRequest, resp * resp.State.Set(ctx, &model) } -func (r *resourceTag) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { +func (r *resourceTag) Update(_ context.Context, _ resource.UpdateRequest, _ *resource.UpdateResponse) { } func (r *resourceTag) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { @@ -143,7 +143,7 @@ func (r *resourceTag) ImportState(ctx context.Context, req resource.ImportStateR type resourceTagModel struct { ID types.String `tfsdk:"id"` - HtmlUrl types.String `tfsdk:"html_url"` + HTMLURL types.String `tfsdk:"html_url"` Label types.String `tfsdk:"label"` Summary types.String `tfsdk:"summary"` } @@ -159,7 +159,7 @@ func buildTag(model *resourceTagModel) *pagerduty.Tag { func flattenTag(tag *pagerduty.Tag) resourceTagModel { model := resourceTagModel{ ID: types.StringValue(tag.ID), - HtmlUrl: types.StringValue(tag.HTMLURL), + HTMLURL: types.StringValue(tag.HTMLURL), Label: types.StringValue(tag.Label), Summary: types.StringValue(tag.Summary), } diff --git a/pagerdutyplugin/resource_pagerduty_tag_assignment.go b/pagerdutyplugin/resource_pagerduty_tag_assignment.go index 0acc15380..cc8fcaf55 100644 --- a/pagerdutyplugin/resource_pagerduty_tag_assignment.go +++ b/pagerdutyplugin/resource_pagerduty_tag_assignment.go @@ -196,7 +196,7 @@ func (r *resourceTagAssignment) isFoundTagAssignment(ctx context.Context, entity return isFound } -func (r *resourceTagAssignment) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { +func (r *resourceTagAssignment) Update(_ context.Context, _ resource.UpdateRequest, _ *resource.UpdateResponse) { } func (r *resourceTagAssignment) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { @@ -240,7 +240,7 @@ func (r *resourceTagAssignment) Delete(ctx context.Context, req resource.DeleteR resp.State.RemoveResource(ctx) } -func (r *resourceTagAssignment) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceTagAssignment) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) 
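A note on the model-field renames in resource_pagerduty_tag.go above (HtmlUrl → HTMLURL and similar revive-style initialism fixes throughout this commit): only Go identifiers change. The Terraform attribute names are carried by the tfsdk struct tags, which this patch leaves untouched, so practitioner configuration and state are unaffected. For example:

type resourceTagModel struct {
	ID      types.String `tfsdk:"id"`
	HTMLURL types.String `tfsdk:"html_url"` // Go field renamed; the attribute is still "html_url"
	Label   types.String `tfsdk:"label"`
	Summary types.String `tfsdk:"summary"`
}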
} diff --git a/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go b/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go index e4c4af901..01bca7169 100644 --- a/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go +++ b/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go @@ -209,7 +209,7 @@ func testAccCheckPagerDutyTagAssignmentExists(n, entityType string) resource.Tes return err } // find tag the test created - var isFound bool = false + isFound := false for _, tag := range response.Tags { if tag.ID == tagID { isFound = true diff --git a/pagerdutyplugin/resource_pagerduty_tag_test.go b/pagerdutyplugin/resource_pagerduty_tag_test.go index f6d317324..8723776ac 100644 --- a/pagerdutyplugin/resource_pagerduty_tag_test.go +++ b/pagerdutyplugin/resource_pagerduty_tag_test.go @@ -20,7 +20,7 @@ func init() { }) } -func testSweepTag(region string) error { +func testSweepTag(_ string) error { client := testAccProvider.client ctx := context.Background() From ec6019e3c94cfa4b0e2fcdc766d31252bc59b48b Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Thu, 2 May 2024 23:00:46 -0400 Subject: [PATCH 02/11] Add util/validate folder --- pagerdutyplugin/provider_test.go | 14 ++++ util/build.go | 23 ++++++ util/enumtypes/int64.go | 105 +++++++++++++++++++++++++ util/enumtypes/string.go | 100 +++++++++++++++++++++++ util/rangetypes/int64.go | 91 +++++++++++++++++++++ util/string_describer.go | 13 +++ util/tztypes/string.go | 90 +++++++++++++++++++++ util/util.go | 12 +-- util/validate/alternatives_for_path.go | 28 +++++++ util/validate/forbid_a_if_b_equal.go | 73 +++++++++++++++++ util/validate/is_allowed_string.go | 43 ++++++++++ util/validate/require.go | 53 +++++++++++++ util/validate/require_a_if_b_equal.go | 57 ++++++++++++++ util/validate/require_list_size.go | 39 +++++++++ util/validate/timezone.go | 33 ++++++++ 15 files changed, 768 insertions(+), 6 deletions(-) create mode 100644 util/build.go create mode 100644 util/enumtypes/int64.go create mode 100644 util/enumtypes/string.go create mode 100644 util/rangetypes/int64.go create mode 100644 util/string_describer.go create mode 100644 util/tztypes/string.go create mode 100644 util/validate/alternatives_for_path.go create mode 100644 util/validate/forbid_a_if_b_equal.go create mode 100644 util/validate/is_allowed_string.go create mode 100644 util/validate/require.go create mode 100644 util/validate/require_a_if_b_equal.go create mode 100644 util/validate/require_list_size.go create mode 100644 util/validate/timezone.go diff --git a/pagerdutyplugin/provider_test.go b/pagerdutyplugin/provider_test.go index ea839e2b2..f562e13bb 100644 --- a/pagerdutyplugin/provider_test.go +++ b/pagerdutyplugin/provider_test.go @@ -84,3 +84,17 @@ func testAccTimeNow() time.Time { } return util.TimeNowInLoc(name) } + +func testAccPreCheckPagerDutyAbility(t *testing.T, ability string) { + if v := os.Getenv("PAGERDUTY_TOKEN"); v == "" { + t.Fatal("PAGERDUTY_TOKEN must be set for acceptance tests") + } + if v := os.Getenv("PAGERDUTY_USER_TOKEN"); v == "" { + t.Fatal("PAGERDUTY_USER_TOKEN must be set for acceptance tests") + } + + ctx := context.Background() + if err := testAccProvider.client.TestAbilityWithContext(ctx, ability); err != nil { + t.Skipf("Missing ability: %s. 
Skipping test", ability) + } +} diff --git a/util/build.go b/util/build.go new file mode 100644 index 000000000..80bec4b65 --- /dev/null +++ b/util/build.go @@ -0,0 +1,23 @@ +package util + +import ( + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func StringToUintPointer(p path.Path, s types.String, diags *diag.Diagnostics) *uint { + if s.IsNull() || s.IsUnknown() || s.ValueString() == "" || s.ValueString() == "null" { + return nil + } + if val, err := strconv.Atoi(s.ValueString()); err == nil { + uintvalue := uint(val) + return &uintvalue + } else { + diags.AddError(fmt.Sprintf("Value for %q is not a valid number", p), err.Error()) + } + return nil +} diff --git a/util/enumtypes/int64.go b/util/enumtypes/int64.go new file mode 100644 index 000000000..0aab6bf94 --- /dev/null +++ b/util/enumtypes/int64.go @@ -0,0 +1,105 @@ +package enumtypes + +import ( + "context" + "fmt" + "math/big" + "slices" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type Int64Value struct { + basetypes.Int64Value + EnumType Int64Type +} + +func NewInt64Null(t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Null(), EnumType: t} +} + +func NewInt64Value(v int64, t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Value(v), EnumType: t} +} + +func (s Int64Value) Type(_ context.Context) attr.Type { + return s.EnumType +} + +type Int64Type struct { + basetypes.Int64Type + OneOf []int64 +} + +func (t Int64Type) Int64() string { + return "enumtypes.Int64Type" +} + +func (t Int64Type) Equal(o attr.Type) bool { + if t2, ok := o.(Int64Type); ok { + return slices.Equal(t.OneOf, t2.OneOf) + } + return t.Int64Type.Equal(o) +} + +func (t Int64Type) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.Number) { + err := fmt.Errorf("expected Int64 value, received %T with value: %v", in, in) + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return diags + } + + if !in.IsKnown() || in.IsNull() { + return diags + } + + var valueFloat big.Float + if err := in.As(&valueFloat); err != nil { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. 
"+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return + } + valueInt64, _ := valueFloat.Int64() + + found := false + for _, v := range t.OneOf { + if v == valueInt64 { + found = true + break + } + } + + if !found { + diags.AddAttributeError( + path, + "Invalid Int64 Value", + fmt.Sprintf( + "A string value was provided that is not valid.\n"+ + "Given Value: %v\n"+ + "Expecting One Of: %v", + valueInt64, + t.OneOf, + ), + ) + return + } + + return +} diff --git a/util/enumtypes/string.go b/util/enumtypes/string.go new file mode 100644 index 000000000..1d3a522c0 --- /dev/null +++ b/util/enumtypes/string.go @@ -0,0 +1,100 @@ +package enumtypes + +import ( + "context" + "fmt" + "slices" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type StringValue struct { + basetypes.StringValue + EnumType StringType +} + +func NewStringNull(t StringType) StringValue { + return StringValue{StringValue: basetypes.NewStringNull(), EnumType: t} +} + +func NewStringValue(v string, t StringType) StringValue { + return StringValue{StringValue: basetypes.NewStringValue(v), EnumType: t} +} + +func (s StringValue) Type(_ context.Context) attr.Type { + return s.EnumType +} + +type StringType struct { + basetypes.StringType + OneOf []string +} + +func (t StringType) String() string { + return "enumtypes.StringType" +} + +func (t StringType) Equal(o attr.Type) bool { + if t2, ok := o.(StringType); ok { + return slices.Equal(t.OneOf, t2.OneOf) + } + return t.StringType.Equal(o) +} + +func (t StringType) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.String) { + err := fmt.Errorf("expected String value, received %T with value: %v", in, in) + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return diags + } + + if !in.IsKnown() || in.IsNull() { + return diags + } + + var valueString string + if err := in.As(&valueString); err != nil { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. 
"+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return + } + + found := false + for _, v := range t.OneOf { + if v == valueString { + found = true + break + } + } + + if !found { + diags.AddAttributeError( + path, + "Invalid String Value", + "A string value was provided that is not valid.\n"+ + "Given Value: "+valueString+"\n"+ + "Expecting One Of: "+strings.Join(t.OneOf, ", "), + ) + return + } + + return +} diff --git a/util/rangetypes/int64.go b/util/rangetypes/int64.go new file mode 100644 index 000000000..c23467fb1 --- /dev/null +++ b/util/rangetypes/int64.go @@ -0,0 +1,91 @@ +package rangetypes + +import ( + "context" + "fmt" + "math/big" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type Int64Value struct { + basetypes.Int64Value + RangeType Int64Type +} + +func NewInt64Null(t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Null(), RangeType: t} +} + +func NewInt64Value(v int64, t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Value(v), RangeType: t} +} + +func (s Int64Value) Type(_ context.Context) attr.Type { + return s.RangeType +} + +type Int64Type struct { + basetypes.Int64Type + Start int64 + End int64 +} + +func (t Int64Type) String() string { + return "rangetypes.Int64Type" +} + +func (t Int64Type) Equal(o attr.Type) bool { + if t2, ok := o.(Int64Type); ok { + return t.Start == t2.Start && t.End == t2.End + } + return t.Int64Type.Equal(o) +} + +func (t Int64Type) addTypeValidationError(err error, path path.Path, diags *diag.Diagnostics) { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. 
"+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) +} + +func (t Int64Type) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.Number) { + err := fmt.Errorf("expected Int64 value, received %T with value: %v", in, in) + t.addTypeValidationError(err, path, &diags) + return + } + + if !in.IsKnown() || in.IsNull() { + return + } + + var valueFloat big.Float + if err := in.As(&valueFloat); err != nil { + t.addTypeValidationError(err, path, &diags) + return + } + valueInt64, _ := valueFloat.Int64() + + if valueInt64 < t.Start || valueInt64 > int64(t.End) { + diags.AddAttributeError( + path, + "Invalid Int64 Value", + fmt.Sprintf("A value was provided that is not inside valid range (%v, %v).\n"+ + "Given Value: %v", t.Start, t.End, valueInt64), + ) + return + } + + return +} diff --git a/util/string_describer.go b/util/string_describer.go new file mode 100644 index 000000000..9528a0d6a --- /dev/null +++ b/util/string_describer.go @@ -0,0 +1,13 @@ +package util + +import "context" + +type StringDescriber struct{ Value string } + +func (d StringDescriber) MarkdownDescription(context.Context) string { + return d.Value +} + +func (d StringDescriber) Description(ctx context.Context) string { + return d.MarkdownDescription(ctx) +} diff --git a/util/tztypes/string.go b/util/tztypes/string.go new file mode 100644 index 000000000..315d9edaf --- /dev/null +++ b/util/tztypes/string.go @@ -0,0 +1,90 @@ +package tztypes + +import ( + "context" + "fmt" + + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type StringValue struct { + basetypes.StringValue +} + +func NewStringNull() StringValue { + return StringValue{StringValue: basetypes.NewStringNull()} +} + +func NewStringValue(v string) StringValue { + return StringValue{StringValue: basetypes.NewStringValue(v)} +} + +func (s StringValue) Type(_ context.Context) attr.Type { + return StringType{} +} + +type StringType struct { + basetypes.StringType +} + +func (t StringType) String() string { + return "tztypes.StringType" +} + +func (t StringType) Equal(o attr.Type) bool { + _, ok := o.(StringType) + if ok { + return true + } + + return t.StringType.Equal(o) +} + +func (t StringType) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.String) { + err := fmt.Errorf("expected String value, received %T with value: %v", in, in) + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return diags + } + + if !in.IsKnown() || in.IsNull() { + return diags + } + + var valueString string + if err := in.As(&valueString); err != nil { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. 
"+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return + } + + if !util.IsValidTZ(valueString) { + diags.AddAttributeError( + path, + "Invalid String Value", + "A string value was provided that is not a valid timezone.\n"+ + "Given Value: "+valueString, + ) + return + } + + return +} diff --git a/util/util.go b/util/util.go index 72e91798d..9fa34c6fa 100644 --- a/util/util.go +++ b/util/util.go @@ -292,16 +292,16 @@ func ResourcePagerDutyParseColonCompoundID(id string) (string, string, error) { return parts[0], parts[1], nil } +func IsValidTZ(v string) bool { + foundAt := sort.SearchStrings(validTZ, v) + return foundAt < len(validTZ) && validTZ[foundAt] == v +} + func ValidateTZValueDiagFunc(v interface{}, p cty.Path) diag.Diagnostics { var diags diag.Diagnostics value := v.(string) - valid := false - - foundAt := sort.SearchStrings(validTZ, value) - if foundAt < len(validTZ) && validTZ[foundAt] == value { - valid = true - } + valid := IsValidTZ(value) if !valid { diags = append(diags, diag.Diagnostic{ diff --git a/util/validate/alternatives_for_path.go b/util/validate/alternatives_for_path.go new file mode 100644 index 000000000..599a7990a --- /dev/null +++ b/util/validate/alternatives_for_path.go @@ -0,0 +1,28 @@ +package validate + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +func AlternativesForPath(p path.Path, alt []attr.Value) *alternativesForPathValidator { + return &alternativesForPathValidator{Path: p, Alternatives: alt} +} + +type alternativesForPathValidator struct { + Path path.Path + Alternatives []attr.Value +} + +var _ validator.String = (*alternativesForPathValidator)(nil) + +func (v *alternativesForPathValidator) Description(_ context.Context) string { return "" } +func (v *alternativesForPathValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +func (v *alternativesForPathValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { +} diff --git a/util/validate/forbid_a_if_b_equal.go b/util/validate/forbid_a_if_b_equal.go new file mode 100644 index 000000000..d918ea33d --- /dev/null +++ b/util/validate/forbid_a_if_b_equal.go @@ -0,0 +1,73 @@ +package validate + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// ForbidAIfBEqual raises an error if path `a` is not null when path `b` is +// equal to expected value `exp`. +func ForbidAIfBEqual(a, b path.Path, expected attr.Value) resource.ConfigValidator { + return &forbidIfEqual{ + dst: a, + src: b, + exp: expected, + } +} + +// ForbidAIfBEqual raises an error if path `a` is not null when path `b` is +// equal to expected value `exp`. Raising message `msg` when invalid. 
+func ForbidAIfBEqualWithMessage(a, b path.Path, expected attr.Value, message string) resource.ConfigValidator { + return &forbidIfEqual{ + dst: a, + src: b, + exp: expected, + msg: message, + } +} + +type forbidIfEqual struct { + dst path.Path + src path.Path + exp attr.Value + msg string +} + +func (v *forbidIfEqual) Description(ctx context.Context) string { return "" } +func (v *forbidIfEqual) MarkdownDescription(ctx context.Context) string { return "" } + +func (v *forbidIfEqual) ValidateResource(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + var src attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.src, &src)...) + if resp.Diagnostics.HasError() { + return + } + + if src.IsNull() || src.IsUnknown() { + return + } + + if src.Equal(v.exp) { + var dst attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.dst, &dst)...) + if resp.Diagnostics.HasError() { + return + } + if !dst.IsNull() { + detail := v.msg + if detail == "" { + detail = fmt.Sprintf("When the value of %s equals %s, field %s cannot have a value", v.src, v.exp, v.dst) + } + resp.Diagnostics.AddAttributeError( + v.dst, + fmt.Sprintf("Forbidden %s", v.dst), + detail, + ) + return + } + } +} diff --git a/util/validate/is_allowed_string.go b/util/validate/is_allowed_string.go new file mode 100644 index 000000000..4424544ec --- /dev/null +++ b/util/validate/is_allowed_string.go @@ -0,0 +1,43 @@ +package validate + +import ( + "context" + "strings" + "unicode" + + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +type validateIsAllowedString struct { + validateFn func(s string) bool + util.StringDescriber +} + +func (v validateIsAllowedString) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if ok := v.validateFn(req.ConfigValue.ValueString()); !ok { + resp.Diagnostics.AddError(v.Value, "") + } +} + +func IsAllowedString(mode util.StringContentValidationMode) validator.String { + switch mode { + case util.NoNonPrintableChars: + return validateIsAllowedString{ + func(s string) bool { + for _, char := range s { + if !unicode.IsPrint(char) { + return false + } + } + return s != "" && !strings.HasSuffix(s, " ") + }, + util.StringDescriber{Value: "Name can not be blank, nor contain non-printable characters. Trailing white spaces are not allowed either."}, + } + default: + return validateIsAllowedString{ + func(s string) bool { return false }, + util.StringDescriber{Value: "Invalid mode while using func IsAllowedStringValidator(mode StringContentValidationMode)"}, + } + } +} diff --git a/util/validate/require.go b/util/validate/require.go new file mode 100644 index 000000000..903bef2b1 --- /dev/null +++ b/util/validate/require.go @@ -0,0 +1,53 @@ +package validate + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// Require checks a path is not null. 
+func Require(p path.Path) resource.ConfigValidator { + return &requirePath{Path: p} +} + +type requirePath struct { + path.Path +} + +func (v *requirePath) Description(ctx context.Context) string { + return "Forces item to be present if its parent is present" +} + +func (v *requirePath) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +func (v *requirePath) ValidateResource(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + var parent attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.Path.ParentPath(), &parent)...) + if resp.Diagnostics.HasError() { + return + } + if parent.IsNull() { + return + } + + var src attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.Path, &src)...) + if resp.Diagnostics.HasError() { + return + } + + if src.IsNull() { + resp.Diagnostics.AddAttributeError( + v.Path, + fmt.Sprintf("Required %s", v.Path), + fmt.Sprintf("Field %s must have an explicit value", v.Path), + ) + return + } +} diff --git a/util/validate/require_a_if_b_equal.go b/util/validate/require_a_if_b_equal.go new file mode 100644 index 000000000..df03c7d55 --- /dev/null +++ b/util/validate/require_a_if_b_equal.go @@ -0,0 +1,57 @@ +package validate + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// RequireAIfBEqual checks path `a` is not null when path `b` is equal to `expected`. +func RequireAIfBEqual(a, b path.Path, expected attr.Value) resource.ConfigValidator { + return &requireIfEqual{ + dst: a, + src: b, + expected: expected, + } +} + +type requireIfEqual struct { + dst path.Path + src path.Path + expected attr.Value +} + +func (v *requireIfEqual) Description(ctx context.Context) string { return "" } +func (v *requireIfEqual) MarkdownDescription(ctx context.Context) string { return "" } + +func (v *requireIfEqual) ValidateResource(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + var src attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.src, &src)...) + if resp.Diagnostics.HasError() { + return + } + + if src.IsNull() || src.IsUnknown() { + return + } + + if src.Equal(v.expected) { + var dst attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.dst, &dst)...) + if resp.Diagnostics.HasError() { + return + } + + if dst.IsNull() || dst.IsUnknown() { + resp.Diagnostics.AddAttributeError( + v.dst, + fmt.Sprintf("Required %s", v.dst), + fmt.Sprintf("When the value of %s equals %s, field %s must have an explicit value", v.src, v.expected, v.dst), + ) + return + } + } +} diff --git a/util/validate/require_list_size.go b/util/validate/require_list_size.go new file mode 100644 index 000000000..c500c8386 --- /dev/null +++ b/util/validate/require_list_size.go @@ -0,0 +1,39 @@ +package validate + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// RequireList checks path `p` is a list at least with size 1. 
+func RequireList(p path.Path) resource.ConfigValidator { + return &requireListSize{Path: p} +} + +type requireListSize struct { + path.Path +} + +func (v *requireListSize) Description(ctx context.Context) string { return "" } +func (v *requireListSize) MarkdownDescription(ctx context.Context) string { return "" } + +func (v *requireListSize) ValidateResource(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + var src attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.Path, &src)...) + if resp.Diagnostics.HasError() { + return + } + + if src.IsNull() || src.IsUnknown() { + return + } + + size := 1 + if size < 1 { + resp.Diagnostics.AddAttributeError(v.Path, "Required to be a list with items", "") + return + } +} diff --git a/util/validate/timezone.go b/util/validate/timezone.go new file mode 100644 index 000000000..53d5ab358 --- /dev/null +++ b/util/validate/timezone.go @@ -0,0 +1,33 @@ +package validate + +import ( + "context" + "fmt" + "time" + + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +type timezoneValidator struct { + util.StringDescriber +} + +func Timezone() validator.String { + return &timezoneValidator{ + util.StringDescriber{Value: "checks time zone is supported by the machine's tzdata"}, + } +} + +func (v timezoneValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if req.ConfigValue.IsNull() { + return + } + value := req.ConfigValue.ValueString() + _, err := time.LoadLocation(value) + if err != nil { + resp.Diagnostics.AddAttributeError( + req.Path, fmt.Sprintf("Timezone %q is invalid", value), err.Error(), + ) + } +} From 6628ee92bd790b3d494ce53fcb8489be403e9bbc Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Thu, 8 Feb 2024 12:36:00 -0300 Subject: [PATCH 03/11] Migrate resource service Using protocol v5 Add validation to require support_hours when incident_urgency_rule is "use_support_hours" Improve pagerduty_service testing Add validations for pagerduty_service properties inside blocks --- pagerduty/provider.go | 1 + pagerduty/provider_test.go | 27 + ...chestration_service_cache_variable_test.go | 30 + ...agerduty_incident_workflow_trigger_test.go | 34 + ...ource_pagerduty_maintenance_window_test.go | 15 + .../import_pagerduty_service_test.go | 30 +- pagerdutyplugin/provider.go | 1 + pagerdutyplugin/resource_pagerduty_service.go | 1003 +++++++++++++++++ .../resource_pagerduty_service_test.go | 141 +-- util/build.go | 6 +- util/validate/require_a_if_b_equal.go | 3 +- util/validator.go | 63 ++ .../listvalidator/all.go | 57 + .../listvalidator/also_requires.go | 26 + .../listvalidator/any.go | 65 ++ .../listvalidator/any_with_all_warnings.go | 67 ++ .../listvalidator/at_least_one_of.go | 27 + .../listvalidator/conflicts_with.go | 27 + .../listvalidator/doc.go | 5 + .../listvalidator/exactly_one_of.go | 28 + .../listvalidator/is_required.go | 44 + .../listvalidator/size_at_least.go | 59 + .../listvalidator/size_at_most.go | 59 + .../listvalidator/size_between.go | 62 + .../listvalidator/unique_values.go | 68 ++ .../listvalidator/value_float64s_are.go | 119 ++ .../listvalidator/value_int64s_are.go | 119 ++ .../listvalidator/value_lists_are.go | 119 ++ .../listvalidator/value_maps_are.go | 119 ++ .../listvalidator/value_numbers_are.go | 119 ++ .../listvalidator/value_sets_are.go | 119 ++ .../listvalidator/value_strings_are.go | 119 ++ 
.../resource/schema/listplanmodifier/doc.go | 5 + .../listplanmodifier/requires_replace.go | 30 + .../listplanmodifier/requires_replace_if.go | 73 ++ .../requires_replace_if_configured.go | 34 + .../requires_replace_if_func.go | 25 + .../listplanmodifier/use_state_for_unknown.go | 55 + vendor/modules.txt | 2 + 39 files changed, 2921 insertions(+), 84 deletions(-) rename {pagerduty => pagerdutyplugin}/import_pagerduty_service_test.go (77%) create mode 100644 pagerdutyplugin/resource_pagerduty_service.go rename {pagerduty => pagerdutyplugin}/resource_pagerduty_service_test.go (95%) create mode 100644 util/validator.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/all.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/also_requires.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any_with_all_warnings.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/at_least_one_of.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/conflicts_with.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/doc.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/exactly_one_of.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/is_required.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_least.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_most.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_between.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/unique_values.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_float64s_are.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_int64s_are.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_lists_are.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_maps_are.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_numbers_are.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_sets_are.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_strings_are.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/doc.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_configured.go create mode 100644 
vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_func.go create mode 100644 vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/use_state_for_unknown.go diff --git a/pagerduty/provider.go b/pagerduty/provider.go index d0c38576b..d257b2372 100644 --- a/pagerduty/provider.go +++ b/pagerduty/provider.go @@ -152,6 +152,7 @@ func Provider(isMux bool) *schema.Provider { delete(p.DataSourcesMap, "pagerduty_business_service") delete(p.ResourcesMap, "pagerduty_business_service") + delete(p.ResourcesMap, "pagerduty_service") } p.ConfigureContextFunc = func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { diff --git a/pagerduty/provider_test.go b/pagerduty/provider_test.go index 5077b3f02..8bd8daa58 100644 --- a/pagerduty/provider_test.go +++ b/pagerduty/provider_test.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/heimweh/go-pagerduty/pagerduty" ) @@ -331,3 +332,29 @@ func testAccGetPagerDutyAccountDomain(t *testing.T) string { } return accountDomain } + +func testAccCheckPagerDutyServiceExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No Service ID is set") + } + + client, _ := testAccProvider.Meta().(*Config).Client() + + found, _, err := client.Services.Get(rs.Primary.ID, &pagerduty.GetServiceOptions{}) + if err != nil { + return err + } + + if found.ID != rs.Primary.ID { + return fmt.Errorf("Service not found: %v - %v", rs.Primary.ID, found) + } + + return nil + } +} diff --git a/pagerduty/resource_pagerduty_event_orchestration_service_cache_variable_test.go b/pagerduty/resource_pagerduty_event_orchestration_service_cache_variable_test.go index 07fdee1f4..b41f8e51b 100644 --- a/pagerduty/resource_pagerduty_event_orchestration_service_cache_variable_test.go +++ b/pagerduty/resource_pagerduty_event_orchestration_service_cache_variable_test.go @@ -3,6 +3,8 @@ package pagerduty import ( "context" "fmt" + "log" + "strings" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" @@ -231,3 +233,31 @@ func testAccCheckPagerDutyEventOrchestrationServiceCacheVariableDeletedConfig(sv } `, svc, svc) } + +func testSweepService(region string) error { + config, err := sharedConfigForRegion(region) + if err != nil { + return err + } + + client, err := config.Client() + if err != nil { + return err + } + + resp, _, err := client.Services.List(&pagerduty.ListServicesOptions{}) + if err != nil { + return err + } + + for _, service := range resp.Services { + if strings.HasPrefix(service.Name, "test") || strings.HasPrefix(service.Name, "tf-") { + log.Printf("Destroying service %s (%s)", service.Name, service.ID) + if _, err := client.Services.Delete(service.ID); err != nil { + return err + } + } + } + + return nil +} diff --git a/pagerduty/resource_pagerduty_incident_workflow_trigger_test.go b/pagerduty/resource_pagerduty_incident_workflow_trigger_test.go index b20eed3e2..cec0513a8 100644 --- a/pagerduty/resource_pagerduty_incident_workflow_trigger_test.go +++ b/pagerduty/resource_pagerduty_incident_workflow_trigger_test.go @@ -509,3 +509,37 @@ func 
testAccCheckPagerDutyIncidentWorkflowTriggerExists(n string) resource.TestC return nil } } + +func testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, service string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} + +resource "pagerduty_service" "foo" { + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id +} +`, username, email, escalationPolicy, service) +} diff --git a/pagerduty/resource_pagerduty_maintenance_window_test.go b/pagerduty/resource_pagerduty_maintenance_window_test.go index 22045bee8..47547e6c5 100644 --- a/pagerduty/resource_pagerduty_maintenance_window_test.go +++ b/pagerduty/resource_pagerduty_maintenance_window_test.go @@ -219,3 +219,18 @@ resource "pagerduty_maintenance_window" "foo" { } `, desc, start, end) } + +func testAccCheckPagerDutyAddonDestroy(s *terraform.State) error { + client, _ := testAccProvider.Meta().(*Config).Client() + for _, r := range s.RootModule().Resources { + if r.Type != "pagerduty_addon" { + continue + } + + if _, _, err := client.Addons.Get(r.Primary.ID); err == nil { + return fmt.Errorf("Add-on still exists") + } + + } + return nil +} diff --git a/pagerduty/import_pagerduty_service_test.go b/pagerdutyplugin/import_pagerduty_service_test.go similarity index 77% rename from pagerduty/import_pagerduty_service_test.go rename to pagerdutyplugin/import_pagerduty_service_test.go index bb64798ea..ce82f2d92 100644 --- a/pagerduty/import_pagerduty_service_test.go +++ b/pagerdutyplugin/import_pagerduty_service_test.go @@ -15,9 +15,9 @@ func TestAccPagerDutyService_import(t *testing.T) { service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, service), @@ -32,16 +32,16 @@ func TestAccPagerDutyService_import(t *testing.T) { }) } -func TestAccPagerDutyServiceWithIncidentUrgency_import(t *testing.T) { +func TestAccPagerDutyService_import_WithIncidentUrgency(t *testing.T) { username := fmt.Sprintf("tf-%s", acctest.RandString(5)) email := fmt.Sprintf("%s@foo.test", username) escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceWithIncidentUrgencyRulesConfig(username, email, escalationPolicy, service), @@ -56,16 +56,16 @@ func TestAccPagerDutyServiceWithIncidentUrgency_import(t *testing.T) { }) } -func 
TestAccPagerDutyServiceWithAlertGroupingParameters_import(t *testing.T) { +func TestAccPagerDutyService_import_WithAlertGroupingParameters(t *testing.T) { username := fmt.Sprintf("tf-%s", acctest.RandString(5)) email := fmt.Sprintf("%s@foo.test", username) escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfigWithAlertContentGrouping(username, email, escalationPolicy, service), @@ -88,16 +88,16 @@ func TestAccPagerDutyServiceWithAlertGroupingParameters_import(t *testing.T) { }) } -func TestAccPagerDutyServiceWithAutoPauseNotifications_import(t *testing.T) { +func TestAccPagerDutyService_import_WithAutoPauseNotifications(t *testing.T) { username := fmt.Sprintf("tf-%s", acctest.RandString(5)) email := fmt.Sprintf("%s@foo.test", username) escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfigWithAutoPauseNotificationsParameters(username, email, escalationPolicy, service), diff --git a/pagerdutyplugin/provider.go b/pagerdutyplugin/provider.go index 06a6852db..96aeea17c 100644 --- a/pagerdutyplugin/provider.go +++ b/pagerdutyplugin/provider.go @@ -65,6 +65,7 @@ func (p *Provider) Resources(_ context.Context) [](func() resource.Resource) { func() resource.Resource { return &resourceBusinessService{} }, func() resource.Resource { return &resourceExtensionServiceNow{} }, func() resource.Resource { return &resourceExtension{} }, + func() resource.Resource { return &resourceService{} }, func() resource.Resource { return &resourceTagAssignment{} }, func() resource.Resource { return &resourceTag{} }, } diff --git a/pagerdutyplugin/resource_pagerduty_service.go b/pagerdutyplugin/resource_pagerduty_service.go new file mode 100644 index 000000000..5d852cac8 --- /dev/null +++ b/pagerdutyplugin/resource_pagerduty_service.go @@ -0,0 +1,1003 @@ +package pagerduty + +import ( + "context" + "fmt" + "log" + "strconv" + "time" + + "github.com/PagerDuty/go-pagerduty" + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/PagerDuty/terraform-provider-pagerduty/util/enumtypes" + "github.com/PagerDuty/terraform-provider-pagerduty/util/rangetypes" + "github.com/PagerDuty/terraform-provider-pagerduty/util/tztypes" + "github.com/PagerDuty/terraform-provider-pagerduty/util/validate" + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + 
"github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" +) + +type resourceService struct { + client *pagerduty.Client +} + +var ( + _ resource.ResourceWithConfigure = (*resourceService)(nil) + _ resource.ResourceWithConfigValidators = (*resourceService)(nil) + _ resource.ResourceWithImportState = (*resourceService)(nil) +) + +func (r *resourceService) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) +} + +func (r *resourceService) ConfigValidators(ctx context.Context) []resource.ConfigValidator { + return []resource.ConfigValidator{ + validate.Require( + path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), + ), + validate.RequireAIfBEqual( + path.Root("support_hours"), + path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), + types.StringValue("use_support_hours"), + ), + validate.ForbidAIfBEqualWithMessage( + path.Root("incident_urgency_rule").AtListIndex(0).AtName("urgency"), + path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), + types.StringValue("use_support_hours"), + "general urgency cannot be set for a use_support_hours incident urgency rule type", + ), + validate.RequireList(path.Root("alert_grouping_parameters").AtListIndex(0).AtName("config")), + validate.RequireList(path.Root("incident_urgency_rule").AtListIndex(0).AtName("during_support_hours")), + validate.RequireList(path.Root("incident_urgency_rule").AtListIndex(0).AtName("outside_support_hours")), + validate.RequireList(path.Root("support_hours").AtListIndex(0).AtName("days_of_week")), // TODO at most 7 + } +} + +func (r *resourceService) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = "pagerduty_service" +} + +func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + + "name": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + validate.IsAllowedString(util.NoNonPrintableChars), + }, + }, + + "acknowledgement_timeout": schema.StringAttribute{ + Computed: true, + Optional: true, + Default: stringdefault.StaticString("1800"), + }, + + "alert_creation": schema.StringAttribute{ + Optional: true, + Computed: true, + Default: stringdefault.StaticString("create_alerts_and_incidents"), + Validators: []validator.String{ + stringvalidator.OneOf("create_alerts_and_incidents", "create_incidents"), + }, + }, + + "alert_grouping": schema.StringAttribute{ + Computed: true, + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf("time", "intelligent", "content_based"), + 
stringvalidator.ConflictsWith(path.MatchRoot("alert_grouping_parameters")), + }, + DeprecationMessage: "Use `alert_grouping_parameters.type`", + }, + + "alert_grouping_timeout": schema.StringAttribute{ + Computed: true, + Optional: true, + DeprecationMessage: "Use `alert_grouping_parameters.config.timeout`", + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot("alert_grouping_parameters")), + }, + }, + + "auto_resolve_timeout": schema.StringAttribute{ + Computed: true, + Optional: true, + Default: stringdefault.StaticString("14400"), + }, + + "description": schema.StringAttribute{ + Optional: true, + Computed: true, + Default: stringdefault.StaticString("Managed by Terraform"), + }, + + "created_at": schema.StringAttribute{Computed: true}, + "escalation_policy": schema.StringAttribute{Required: true}, + "html_url": schema.StringAttribute{Computed: true}, + "last_incident_timestamp": schema.StringAttribute{Computed: true}, + "response_play": schema.StringAttribute{Computed: true, Optional: true}, + "status": schema.StringAttribute{Computed: true}, + "type": schema.StringAttribute{Computed: true}, + + "alert_grouping_parameters": schema.ListAttribute{ + Optional: true, + Computed: true, + Validators: []validator.List{ + listvalidator.SizeBetween(1, 1), + listvalidator.ConflictsWith(path.MatchRoot("alert_grouping")), + listvalidator.ConflictsWith(path.MatchRoot("alert_grouping_timeout")), + }, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": alertGroupingParametersTypeType, + "config": types.ListType{ + ElemType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "timeout": types.Int64Type, + "fields": types.ListType{ElemType: types.StringType}, + "aggregate": alertGroupingParametersConfigAggregateType, + "time_window": alertGroupingParametersConfigTimeWindowType, + }, + }, + }, + }, + }, + }, + + "auto_pause_notifications_parameters": schema.ListAttribute{ + Optional: true, + Computed: true, + Validators: []validator.List{ + listvalidator.SizeAtLeast(1), + listvalidator.SizeAtMost(1), + }, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "enabled": types.BoolType, + "timeout": autoPauseNotificationsParametersTimeoutType, + }, + }, + }, + + "incident_urgency_rule": schema.ListAttribute{ + Optional: true, + Computed: true, + Validators: []validator.List{ + listvalidator.SizeAtLeast(1), + listvalidator.SizeAtMost(1), + }, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "urgency": types.StringType, + "during_support_hours": types.ListType{ + ElemType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, // require + "urgency": types.StringType, + }, + }, + }, + "outside_support_hours": types.ListType{ + ElemType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, // require + "urgency": types.StringType, + }, + }, + }, + }, + }, + }, + + "scheduled_actions": schema.ListAttribute{ + Optional: true, + Computed: true, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "to_urgency": types.StringType, + "at": types.ListType{ + ElemType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "name": types.StringType, + }, + }, + }, + }, + }, + PlanModifiers: []planmodifier.List{ + listplanmodifier.RequiresReplace(), + }, + Validators: []validator.List{ + listvalidator.SizeBetween(1, 1), + }, + }, + + "support_hours": schema.ListAttribute{ + 
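+				// Single support_hours block. The ConfigValidators above additionally make
+				// it required whenever incident_urgency_rule.type is "use_support_hours".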
Optional: true, + Computed: true, + Validators: []validator.List{ + listvalidator.SizeBetween(1, 1), + }, + PlanModifiers: []planmodifier.List{ + listplanmodifier.RequiresReplace(), + }, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "start_time": types.StringType, + "end_time": types.StringType, + "time_zone": tztypes.StringType{}, + "days_of_week": types.ListType{ + ElemType: types.StringType, + }, + }, + }, + }, + }, + } +} + +func (r *resourceService) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +func (r *resourceService) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var config resourceServiceModel + var model resourceServiceModel + + resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) + if d := req.Plan.Get(ctx, &model); d.HasError() { + resp.Diagnostics.Append(d...) + return + } + + serviceBody := buildService(ctx, &model, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + log.Printf("[INFO] Creating PagerDuty service %s", serviceBody.Name) + + service, err := r.client.CreateServiceWithContext(ctx, serviceBody) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error creating PagerDuty service %s", serviceBody.Name), + err.Error(), + ) + return + } + + err = retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + serviceResponse, err := r.client.GetServiceWithContext(ctx, service.ID, &pagerduty.GetServiceOptions{ + Includes: []string{"auto_pause_notifications_parameters"}, + }) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + return retry.RetryableError(err) + } + model = flattenService(ctx, serviceResponse, config, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return retry.NonRetryableError(fmt.Errorf("%#v", resp.Diagnostics)) + } + return nil + }) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error reading PagerDuty service %s", service.ID), + err.Error(), + ) + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) +} + +func (r *resourceService) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var state resourceServiceModel + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + + var id types.String + if d := req.State.GetAttribute(ctx, path.Root("id"), &id); d.HasError() { + resp.Diagnostics.Append(d...) 
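+		// No early return here: if the id cannot be read it stays null, and the
+		// IsNull check below removes the resource from state instead of calling the API.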
+ } + log.Printf("[INFO] Reading PagerDuty service %s", id) + + if id.IsNull() { + resp.State.RemoveResource(ctx) + return + } + + var model resourceServiceModel + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + serviceResponse, err := r.client.GetServiceWithContext(ctx, id.ValueString(), &pagerduty.GetServiceOptions{ + Includes: []string{"auto_pause_notifications_parameters"}, + }) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + if util.IsNotFoundError(err) { + resp.State.RemoveResource(ctx) + return nil + } + return retry.RetryableError(err) + } + model = flattenService(ctx, serviceResponse, state, &resp.Diagnostics) + return nil + }) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error reading PagerDuty service %s", id), + err.Error(), + ) + return + } + resp.State.Set(ctx, &model) +} + +func (r *resourceService) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var state resourceServiceModel + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + + var model resourceServiceModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &model)...) + if resp.Diagnostics.HasError() { + return + } + + plan := buildService(ctx, &model, &resp.Diagnostics) + log.Printf("[INFO] Updating PagerDuty service %s", plan.ID) + + service, err := r.client.UpdateServiceWithContext(ctx, plan) + if err != nil { + if util.IsNotFoundError(err) { + resp.State.RemoveResource(ctx) + return + } + resp.Diagnostics.AddError( + fmt.Sprintf("Error updating PagerDuty service %s", plan.ID), + err.Error(), + ) + return + } + model = flattenService(ctx, service, state, &resp.Diagnostics) + + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) +} + +func (r *resourceService) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var id types.String + if d := req.State.GetAttribute(ctx, path.Root("id"), &id); d.HasError() { + resp.Diagnostics.Append(d...) 
+ } + log.Printf("[INFO] Deleting PagerDuty service %s", id) + + if id.IsNull() { + resp.State.RemoveResource(ctx) + return + } + + if err := r.client.DeleteServiceWithContext(ctx, id.ValueString()); err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error deleting PagerDuty service %s", id), + err.Error(), + ) + return + } + + resp.State.RemoveResource(ctx) +} + +type resourceServiceModel struct { + ID types.String `tfsdk:"id"` + AcknowledgementTimeout types.String `tfsdk:"acknowledgement_timeout"` + AlertCreation types.String `tfsdk:"alert_creation"` + AlertGrouping types.String `tfsdk:"alert_grouping"` + AlertGroupingTimeout types.String `tfsdk:"alert_grouping_timeout"` + AutoResolveTimeout types.String `tfsdk:"auto_resolve_timeout"` + CreatedAt types.String `tfsdk:"created_at"` + Description types.String `tfsdk:"description"` + EscalationPolicy types.String `tfsdk:"escalation_policy"` + HtmlUrl types.String `tfsdk:"html_url"` + LastIncidentTimestamp types.String `tfsdk:"last_incident_timestamp"` + Name types.String `tfsdk:"name"` + ResponsePlay types.String `tfsdk:"response_play"` + Status types.String `tfsdk:"status"` + Type types.String `tfsdk:"type"` + AlertGroupingParameters types.List `tfsdk:"alert_grouping_parameters"` + AutoPauseNotificationsParameters types.List `tfsdk:"auto_pause_notifications_parameters"` + IncidentUrgencyRule types.List `tfsdk:"incident_urgency_rule"` + ScheduledActions types.List `tfsdk:"scheduled_actions"` + SupportHours types.List `tfsdk:"support_hours"` +} + +func buildService(ctx context.Context, model *resourceServiceModel, diags *diag.Diagnostics) pagerduty.Service { + service := pagerduty.Service{ + Name: model.Name.ValueString(), + Description: model.Description.ValueString(), + AlertCreation: model.AlertCreation.ValueString(), + AlertGrouping: model.AlertGrouping.ValueString(), + } + + u := util.StringToUintPointer(path.Root("auto_resolve_timeout"), model.AutoResolveTimeout, diags) + service.AutoResolveTimeout = u + + u = util.StringToUintPointer(path.Root("acknowledgement_timeout"), model.AcknowledgementTimeout, diags) + service.AcknowledgementTimeout = u + + u = util.StringToUintPointer(path.Root("alert_grouping_timeout"), model.AlertGroupingTimeout, diags) + service.AlertGroupingTimeout = u + + service.EscalationPolicy.ID = model.EscalationPolicy.ValueString() + service.EscalationPolicy.Type = "escalation_policy_reference" + + service.AlertGroupingParameters = buildAlertGroupingParameters(ctx, model.AlertGroupingParameters, diags) + service.AutoPauseNotificationsParameters = buildAutoPauseNotificationsParameters(ctx, model.AutoPauseNotificationsParameters, diags) + service.IncidentUrgencyRule = buildIncidentUrgencyRule(ctx, model.IncidentUrgencyRule, diags) + service.ScheduledActions = buildScheduledActions(ctx, model.ScheduledActions, diags) + service.SupportHours = buildSupportHours(ctx, model.SupportHours, diags) + + if !model.ResponsePlay.IsNull() && !model.ResponsePlay.IsUnknown() { + service.ResponsePlay = &pagerduty.APIObject{ + ID: model.ResponsePlay.ValueString(), + Type: "response_play_reference", + } + } + + return service +} + +func buildAlertGroupingParameters(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.AlertGroupingParameters { + if list.IsNull() || list.IsUnknown() { + return nil + } + var target []struct { + Type types.String `tfsdk:"type"` + Config types.List `tfsdk:"config"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) 
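+		// Decoding the block failed; record the diagnostics and leave
+		// AlertGroupingParameters unset on the request.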
+ return nil + } + obj := target[0] + return &pagerduty.AlertGroupingParameters{ + Type: obj.Type.ValueString(), + Config: buildAlertGroupingConfig(ctx, obj.Config, diags), + } +} + +func buildAlertGroupingConfig(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.AlertGroupParamsConfig { + var target []struct { + Timeout types.Int64 `tfsdk:"timeout"` + Aggregate types.String `tfsdk:"aggregate"` + Fields types.List `tfsdk:"fields"` + TimeWindow types.Int64 `tfsdk:"time_window"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) + return nil + } + obj := target[0] + + ut := uint(obj.Timeout.ValueInt64()) + + var fields []string + if d := obj.Fields.ElementsAs(ctx, &fields, false); d.HasError() { + diags.Append(d...) + return nil + } + + return &pagerduty.AlertGroupParamsConfig{ + Timeout: &ut, + Aggregate: obj.Aggregate.ValueString(), + Fields: fields, + } +} + +func buildAutoPauseNotificationsParameters(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.AutoPauseNotificationsParameters { + if list.IsNull() || list.IsUnknown() { + return nil + } + var target []struct { + Timeout types.Int64 `tfsdk:"timeout"` + Enabled types.Bool `tfsdk:"enabled"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) + return nil + } + obj := target[0] + + return &pagerduty.AutoPauseNotificationsParameters{ + Enabled: obj.Enabled.ValueBool(), + Timeout: uint(obj.Timeout.ValueInt64()), + } +} + +func buildIncidentUrgencyRule(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.IncidentUrgencyRule { + if list.IsNull() || list.IsUnknown() { + return nil + } + var target []struct { + Type types.String `tfsdk:"type"` + Urgency types.String `tfsdk:"urgency"` + DuringSupportHours types.List `tfsdk:"during_support_hours"` + OutsideSupportHours types.List `tfsdk:"outside_support_hours"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) + return nil + } + obj := target[0] + incidentUrgencyRule := &pagerduty.IncidentUrgencyRule{ + Type: obj.Type.ValueString(), + Urgency: obj.Urgency.ValueString(), + } + incidentUrgencyRule.DuringSupportHours = buildIncidentUrgencyType(ctx, obj.DuringSupportHours, diags) + incidentUrgencyRule.OutsideSupportHours = buildIncidentUrgencyType(ctx, obj.OutsideSupportHours, diags) + return incidentUrgencyRule +} + +func buildIncidentUrgencyType(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.IncidentUrgencyType { + var target []struct { + Type types.String `tfsdk:"type"` + Urgency types.String `tfsdk:"urgency"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) + } + if len(target) < 1 { + return nil + } + obj := target[0] + return &pagerduty.IncidentUrgencyType{ + Type: obj.Type.ValueString(), + Urgency: obj.Urgency.ValueString(), + } +} + +func buildScheduledActions(ctx context.Context, list types.List, diags *diag.Diagnostics) []pagerduty.ScheduledAction { + scheduledActions := []pagerduty.ScheduledAction{} + if list.IsNull() || list.IsUnknown() { + return scheduledActions + } + var target []struct { + Type types.String `tfsdk:"type"` + ToUrgency types.String `tfsdk:"to_urgency"` + At types.List `tfsdk:"at"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) 
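+		// On a decode error the already-initialised empty slice is returned, so the
+		// service is built with no scheduled actions.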
+ return scheduledActions + } + for _, src := range target { + dst := pagerduty.ScheduledAction{ + Type: src.Type.ValueString(), + ToUrgency: src.ToUrgency.ValueString(), + At: buildScheduledActionAt(ctx, src.At, diags), + } + scheduledActions = append(scheduledActions, dst) + } + return scheduledActions +} + +func buildScheduledActionAt(ctx context.Context, list types.List, diags *diag.Diagnostics) pagerduty.InlineModel { + var target []struct { + Type types.String `tfsdk:"type"` + Name types.String `tfsdk:"name"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) + return pagerduty.InlineModel{} + } + obj := target[0] + return pagerduty.InlineModel{ + Type: obj.Type.ValueString(), + Name: obj.Name.ValueString(), + } +} + +func buildSupportHours(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.SupportHours { + if list.IsNull() || list.IsUnknown() { + return nil + } + var target []struct { + Type types.String `tfsdk:"type"` + Timezone types.String `tfsdk:"time_zone"` + StartTime types.String `tfsdk:"start_time"` + EndTime types.String `tfsdk:"end_time"` + DaysOfWeek types.List `tfsdk:"days_of_week"` + } + if d := list.ElementsAs(ctx, &target, false); d.HasError() { + diags.Append(d...) + return nil + } + obj := target[0] + supportHours := &pagerduty.SupportHours{ + Type: obj.Type.ValueString(), + Timezone: obj.Timezone.ValueString(), + StartTime: obj.StartTime.ValueString(), + EndTime: obj.EndTime.ValueString(), + } + + if !obj.DaysOfWeek.IsNull() { + daysOfWeekStr := []string{} + if d := obj.DaysOfWeek.ElementsAs(ctx, &daysOfWeekStr, false); d.HasError() { + diags.Append(d...) + return nil + } + daysOfWeek := make([]uint, 0, len(daysOfWeekStr)) + for _, s := range daysOfWeekStr { + v, err := strconv.Atoi(s) + if err != nil { + continue + } + daysOfWeek = append(daysOfWeek, uint(v)) + } + supportHours.DaysOfWeek = daysOfWeek + } + return supportHours +} + +var ( + alertGroupingParametersTypeType = enumtypes.StringType{ + OneOf: []string{"time", "intelligent", "content_based"}} + alertGroupingParametersConfigAggregateType = enumtypes.StringType{ + OneOf: []string{"all", "any"}} + alertGroupingParametersConfigTimeWindowType = rangetypes.Int64Type{ + Start: 300, End: 3600} + autoPauseNotificationsParametersTimeoutType = enumtypes.Int64Type{ + OneOf: []int64{120, 180, 300, 600, 900}} +) + +func flattenService(ctx context.Context, service *pagerduty.Service, state resourceServiceModel, diags *diag.Diagnostics) resourceServiceModel { + model := resourceServiceModel{ + ID: types.StringValue(service.ID), + AlertCreation: types.StringValue(service.AlertCreation), + CreatedAt: types.StringValue(service.CreateAt), + Description: types.StringValue(service.Description), + EscalationPolicy: types.StringValue(service.EscalationPolicy.ID), + HtmlUrl: types.StringValue(service.HTMLURL), + LastIncidentTimestamp: types.StringValue(service.LastIncidentTimestamp), + Name: types.StringValue(service.Name), + Status: types.StringValue(service.Status), + Type: types.StringValue(service.Type), + } + + if service.AcknowledgementTimeout != nil { + s := strconv.Itoa(int(*service.AcknowledgementTimeout)) + model.AcknowledgementTimeout = types.StringValue(s) + } else if state.AcknowledgementTimeout.ValueString() == "null" { + model.AcknowledgementTimeout = state.AcknowledgementTimeout + } + + if service.AutoResolveTimeout != nil { + s := strconv.Itoa(int(*service.AutoResolveTimeout)) + model.AutoResolveTimeout = types.StringValue(s) + } else if 
state.AutoResolveTimeout.ValueString() == "null" { + model.AutoResolveTimeout = state.AutoResolveTimeout + } + + if service.AlertGrouping != "" { + model.AlertGrouping = types.StringValue(service.AlertGrouping) + } + + if service.AlertGroupingTimeout != nil { + s := strconv.Itoa(int(*service.AlertGroupingTimeout)) + model.AlertGroupingTimeout = types.StringValue(s) + } + + model.AlertGroupingParameters = flattenAlertGroupingParameters(ctx, service.AlertGroupingParameters, diags) + model.AutoPauseNotificationsParameters = flattenAutoPauseNotificationsParameters(service.AutoPauseNotificationsParameters, diags) + model.IncidentUrgencyRule = flattenIncidentUrgencyRule(service.IncidentUrgencyRule, diags) + + if service.ResponsePlay != nil { + model.ResponsePlay = types.StringValue(service.ResponsePlay.ID) + } + + model.ScheduledActions = flattenScheduledActions(service.ScheduledActions, diags) + model.SupportHours = flattenSupportHours(service.SupportHours, diags) + + return model +} + +func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.AlertGroupingParameters, diags *diag.Diagnostics) types.List { + alertGroupParamsConfigObjectType := types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "aggregate": alertGroupingParametersConfigAggregateType, + "fields": types.ListType{ElemType: types.StringType}, + "timeout": types.Int64Type, + "time_window": alertGroupingParametersConfigTimeWindowType, + }, + } + alertGroupingParametersObjectType := types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": alertGroupingParametersTypeType, + "config": types.ListType{ElemType: alertGroupParamsConfigObjectType}, + }, + } + + nullList := types.ListNull(alertGroupingParametersObjectType) + if params == nil { + return nullList + } + + configList := types.ListNull(alertGroupParamsConfigObjectType) + if params.Config != nil { + fieldsList, d := types.ListValueFrom(ctx, types.StringType, params.Config.Fields) + if d.HasError() { + diags.Append(d...) + return nullList + } + + var timeout types.Int64 + if params.Config.Timeout != nil { + timeout = types.Int64Value(int64(*params.Config.Timeout)) + } + + aggregate := enumtypes.NewStringNull(alertGroupingParametersConfigAggregateType) + if params.Config.Aggregate != "" { + aggregate = enumtypes.NewStringValue(params.Config.Aggregate, alertGroupingParametersConfigAggregateType) + } + + configObj, d := types.ObjectValue(alertGroupParamsConfigObjectType.AttrTypes, map[string]attr.Value{ + "aggregate": aggregate, + "fields": fieldsList, + "timeout": timeout, + "time_window": types.Int64Null(), // TODO + }) + if d.HasError() { + diags.Append(d...) + return nullList + } + configList, d = types.ListValue(alertGroupParamsConfigObjectType, []attr.Value{configObj}) + if d.HasError() { + diags.Append(d...) + return nullList + } + } + + obj, d := types.ObjectValue(alertGroupingParametersObjectType.AttrTypes, map[string]attr.Value{ + "type": enumtypes.NewStringValue(params.Type, alertGroupingParametersTypeType), + "config": configList, + }) + diags.Append(d...) + if d.HasError() { + return nullList + } + + list, d := types.ListValue(alertGroupingParametersObjectType, []attr.Value{obj}) + diags.Append(d...) 
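+	// The object is wrapped in a one-element list to mirror the schema, which models
+	// alert_grouping_parameters as a list constrained to exactly one item.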
+ if d.HasError() { + return nullList + } + + return list +} + +func flattenAutoPauseNotificationsParameters(params *pagerduty.AutoPauseNotificationsParameters, diags *diag.Diagnostics) types.List { + autoPauseNotificationsParametersObjectType := types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "enabled": types.BoolType, + "timeout": autoPauseNotificationsParametersTimeoutType, + }, + } + + nullList := types.ListNull(autoPauseNotificationsParametersObjectType) + if params == nil { + return nullList + } + + timeout := enumtypes.NewInt64Null(autoPauseNotificationsParametersTimeoutType) + if params.Enabled { + timeout = enumtypes.NewInt64Value( + int64(params.Timeout), + autoPauseNotificationsParametersTimeoutType, + ) + } + + obj, d := types.ObjectValue(autoPauseNotificationsParametersObjectType.AttrTypes, map[string]attr.Value{ + "enabled": types.BoolValue(params.Enabled), + "timeout": timeout, + }) + if d.HasError() { + diags.Append(d...) + return nullList + } + + list, d := types.ListValue(autoPauseNotificationsParametersObjectType, []attr.Value{obj}) + if d.HasError() { + diags.Append(d...) + return nullList + } + + return list +} + +var incidentUrgencyTypeObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "urgency": types.StringType, + }, +} + +func flattenIncidentUrgencyRule(rule *pagerduty.IncidentUrgencyRule, diags *diag.Diagnostics) types.List { + incidentUrgencyRuleObjectType := types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "urgency": types.StringType, + "during_support_hours": types.ListType{ElemType: incidentUrgencyTypeObjectType}, + "outside_support_hours": types.ListType{ElemType: incidentUrgencyTypeObjectType}, + }, + } + nullList := types.ListNull(incidentUrgencyTypeObjectType) + if rule == nil { + return nullList + } + + objValues := map[string]attr.Value{ + "type": types.StringValue(rule.Type), + "urgency": types.StringNull(), + "during_support_hours": types.ListNull(incidentUrgencyTypeObjectType), + "outside_support_hours": types.ListNull(incidentUrgencyTypeObjectType), + } + if rule.Urgency != "" { + objValues["urgency"] = types.StringValue(rule.Urgency) + } + if rule.DuringSupportHours != nil { + objValues["during_support_hours"] = flattenIncidentUrgencyType(rule.DuringSupportHours, diags) + } + if rule.OutsideSupportHours != nil { + objValues["outside_support_hours"] = flattenIncidentUrgencyType(rule.OutsideSupportHours, diags) + } + if diags.HasError() { + return nullList + } + + obj, d := types.ObjectValue(incidentUrgencyRuleObjectType.AttrTypes, objValues) + if d.HasError() { + diags.Append(d...) + return nullList + } + + list, d := types.ListValue(incidentUrgencyRuleObjectType, []attr.Value{obj}) + diags.Append(d...) + return list +} + +func flattenIncidentUrgencyType(urgency *pagerduty.IncidentUrgencyType, diags *diag.Diagnostics) types.List { + obj, d := types.ObjectValue(incidentUrgencyTypeObjectType.AttrTypes, map[string]attr.Value{ + "type": types.StringValue(urgency.Type), + "urgency": types.StringValue(urgency.Urgency), + }) + diags.Append(d...) + if d.HasError() { + return types.List{} + } + list, d := types.ListValue(incidentUrgencyTypeObjectType, []attr.Value{obj}) + diags.Append(d...) 
+ return list +} + +var scheduledActionAtObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "name": types.StringType, + }, +} + +func flattenScheduledActions(actions []pagerduty.ScheduledAction, diags *diag.Diagnostics) types.List { + scheduledActionObjectType := types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "to_urgency": types.StringType, + "at": types.ListType{ElemType: scheduledActionAtObjectType}, + }, + } + nullList := types.ListNull(scheduledActionObjectType) + if len(actions) == 0 { + return nullList + } + + elements := []attr.Value{} + for _, action := range actions { + obj, d := types.ObjectValue(scheduledActionObjectType.AttrTypes, map[string]attr.Value{ + "type": types.StringValue(action.Type), + "to_urgency": types.StringValue(action.ToUrgency), + "at": flattenScheduledActionAt(action.At, diags), + }) + diags.Append(d...) + if diags.HasError() { + return nullList + } + elements = append(elements, obj) + } + + list, d := types.ListValue(scheduledActionObjectType, elements) + diags.Append(d...) + return list +} + +func flattenScheduledActionAt(at pagerduty.InlineModel, diags *diag.Diagnostics) types.List { + obj, d := types.ObjectValue(scheduledActionAtObjectType.AttrTypes, map[string]attr.Value{ + "type": types.StringValue(at.Type), + "name": types.StringValue(at.Name), + }) + if d.HasError() { + diags.Append(d...) + return types.List{} + } + list, d := types.ListValue(scheduledActionAtObjectType, []attr.Value{obj}) + diags.Append(d...) + return list +} + +func flattenSupportHours(hours *pagerduty.SupportHours, diags *diag.Diagnostics) types.List { + supportHoursObjectType := types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "start_time": types.StringType, + "end_time": types.StringType, + "time_zone": tztypes.StringType{}, + "days_of_week": types.ListType{ElemType: types.StringType}, + }, + } + nullList := types.ListNull(supportHoursObjectType) + if hours == nil { + return nullList + } + + daysOfWeek := []attr.Value{} + for _, dow := range hours.DaysOfWeek { + v := strconv.FormatInt(int64(dow), 10) + daysOfWeek = append(daysOfWeek, types.StringValue(v)) + } + + dowList, d := types.ListValue(types.StringType, daysOfWeek) + diags.Append(d...) + + obj, d := types.ObjectValue(supportHoursObjectType.AttrTypes, map[string]attr.Value{ + "type": types.StringValue(hours.Type), + "start_time": types.StringValue(hours.StartTime), + "end_time": types.StringValue(hours.EndTime), + "time_zone": tztypes.NewStringValue(hours.Timezone), + "days_of_week": dowList, + }) + if d.HasError() { + diags.Append(d...) + return nullList + } + + list, d := types.ListValue(supportHoursObjectType, []attr.Value{obj}) + diags.Append(d...) 
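+	// days_of_week was converted back to strings above so the flattened block
+	// round-trips the schema's string list element type.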
+ return list +} diff --git a/pagerduty/resource_pagerduty_service_test.go b/pagerdutyplugin/resource_pagerduty_service_test.go similarity index 95% rename from pagerduty/resource_pagerduty_service_test.go rename to pagerdutyplugin/resource_pagerduty_service_test.go index 6f364e27d..7950c0b3e 100644 --- a/pagerduty/resource_pagerduty_service_test.go +++ b/pagerdutyplugin/resource_pagerduty_service_test.go @@ -1,16 +1,18 @@ package pagerduty import ( + "context" "fmt" "log" "regexp" "strings" "testing" + "github.com/PagerDuty/go-pagerduty" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/heimweh/go-pagerduty/pagerduty" ) func init() { @@ -21,17 +23,8 @@ func init() { } func testSweepService(region string) error { - config, err := sharedConfigForRegion(region) - if err != nil { - return err - } - - client, err := config.Client() - if err != nil { - return err - } - - resp, _, err := client.Services.List(&pagerduty.ListServicesOptions{}) + ctx := context.Background() + resp, err := testAccProvider.client.ListServicesWithContext(ctx, pagerduty.ListServiceOptions{}) if err != nil { return err } @@ -39,7 +32,7 @@ func testSweepService(region string) error { for _, service := range resp.Services { if strings.HasPrefix(service.Name, "test") || strings.HasPrefix(service.Name, "tf-") { log.Printf("Destroying service %s (%s)", service.Name, service.ID) - if _, err := client.Services.Delete(service.ID); err != nil { + if err := testAccProvider.client.DeleteServiceWithContext(ctx, service.ID); err != nil { return err } } @@ -56,9 +49,9 @@ func TestAccPagerDutyService_Basic(t *testing.T) { serviceUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, service), @@ -76,8 +69,8 @@ func TestAccPagerDutyService_Basic(t *testing.T) { "pagerduty_service.foo", "alert_creation", "create_alerts_and_incidents"), resource.TestCheckNoResourceAttr( "pagerduty_service.foo", "alert_grouping"), - resource.TestCheckResourceAttr( - "pagerduty_service.foo", "alert_grouping_timeout", "null"), + resource.TestCheckNoResourceAttr( + "pagerduty_service.foo", "alert_grouping_timeout"), resource.TestCheckResourceAttr( "pagerduty_service.foo", "incident_urgency_rule.#", "1"), resource.TestCheckResourceAttr( @@ -138,9 +131,9 @@ func TestAccPagerDutyService_FormatValidation(t *testing.T) { errMessageMatcher := "Name can not be blank, nor contain non-printable characters. Trailing white spaces are not allowed either." 
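+	// This matcher mirrors the message emitted by validate.IsAllowedString with
+	// util.NoNonPrintableChars in the plugin-framework service schema.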
resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ // Just a valid name { @@ -331,9 +324,9 @@ func TestAccPagerDutyService_AlertGrouping(t *testing.T) { service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t); testAccPreCheckPagerDutyAbility(t, "preview_intelligent_alert_grouping") }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t); testAccPreCheckPagerDutyAbility(t, "preview_intelligent_alert_grouping") }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfigWithAlertGrouping(username, email, escalationPolicy, service), @@ -398,9 +391,9 @@ func TestAccPagerDutyService_AlertContentGrouping(t *testing.T) { service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfigWithAlertContentGrouping(username, email, escalationPolicy, service), @@ -631,9 +624,9 @@ func TestAccPagerDutyService_AlertContentGroupingIntelligentTimeWindow(t *testin service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfigWithAlertContentGroupingIntelligentTimeWindow(username, email, escalationPolicy, service), @@ -676,9 +669,9 @@ func TestAccPagerDutyService_AutoPauseNotificationsParameters(t *testing.T) { service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfigWithAutoPauseNotificationsParameters(username, email, escalationPolicy, service), @@ -758,9 +751,9 @@ func TestAccPagerDutyService_BasicWithIncidentUrgencyRules(t *testing.T) { serviceUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: 
testAccCheckPagerDutyServiceWithIncidentUrgencyRulesConfig(username, email, escalationPolicy, service), @@ -828,7 +821,7 @@ func TestAccPagerDutyService_BasicWithIncidentUrgencyRules(t *testing.T) { }, { Config: testAccCheckPagerDutyServiceWithIncidentUrgencyRulesConfigError(username, email, escalationPolicy, serviceUpdated), - ExpectError: regexp.MustCompile("general urgency cannot be set for a use_support_hours incident urgency rule type"), + ExpectError: regexp.MustCompile(`general urgency cannot be set for a use_support_hours incident\s+urgency\s+rule\s+type`), }, { Config: testAccCheckPagerDutyServiceWithIncidentUrgencyRulesWithoutScheduledActionsConfig(username, email, escalationPolicy, service), @@ -960,9 +953,9 @@ func TestAccPagerDutyService_FromBasicToCustomIncidentUrgencyRules(t *testing.T) serviceUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, service), @@ -1065,9 +1058,9 @@ func TestAccPagerDutyService_SupportHoursChange(t *testing.T) { p_updated_service_id := &updated_service_id resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceWithIncidentUrgencyRulesConfig(username, email, escalationPolicy, service), @@ -1103,9 +1096,9 @@ func TestAccPagerDutyService_ResponsePlay(t *testing.T) { service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyServiceWithResponsePlayConfig(username, email, escalationPolicy, responsePlay, service), @@ -1123,8 +1116,8 @@ func TestAccPagerDutyService_ResponsePlay(t *testing.T) { "pagerduty_service.foo", "alert_creation", "create_alerts_and_incidents"), resource.TestCheckNoResourceAttr( "pagerduty_service.foo", "alert_grouping"), - resource.TestCheckResourceAttr( - "pagerduty_service.foo", "alert_grouping_timeout", "null"), + resource.TestCheckNoResourceAttr( + "pagerduty_service.foo", "alert_grouping_timeout"), resource.TestCheckResourceAttr( "pagerduty_service.foo", "incident_urgency_rule.#", "1"), resource.TestCheckResourceAttr( @@ -1143,9 +1136,9 @@ func TestAccPagerDutyService_ResponsePlay(t *testing.T) { }) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyServiceDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ { Config: 
testAccCheckPagerDutyServiceWithNullResponsePlayConfig(username, email, escalationPolicy, responsePlay, service), @@ -1163,8 +1156,8 @@ func TestAccPagerDutyService_ResponsePlay(t *testing.T) { "pagerduty_service.foo", "alert_creation", "create_alerts_and_incidents"), resource.TestCheckNoResourceAttr( "pagerduty_service.foo", "alert_grouping"), - resource.TestCheckResourceAttr( - "pagerduty_service.foo", "alert_grouping_timeout", "null"), + resource.TestCheckNoResourceAttr( + "pagerduty_service.foo", "alert_grouping_timeout"), resource.TestCheckResourceAttr( "pagerduty_service.foo", "incident_urgency_rule.#", "1"), resource.TestCheckResourceAttr( @@ -1194,9 +1187,8 @@ func testAccCheckPagerDutyServiceSaveServiceId(p *string, n string) resource.Tes return fmt.Errorf("No Service ID is set") } - client, _ := testAccProvider.Meta().(*Config).Client() - - found, _, err := client.Services.Get(rs.Primary.ID, &pagerduty.GetServiceOptions{}) + ctx := context.Background() + found, err := testAccProvider.client.GetServiceWithContext(ctx, rs.Primary.ID, &pagerduty.GetServiceOptions{}) if err != nil { return err } @@ -1212,13 +1204,14 @@ func testAccCheckPagerDutyServiceSaveServiceId(p *string, n string) resource.Tes } func testAccCheckPagerDutyServiceDestroy(s *terraform.State) error { - client, _ := testAccProvider.Meta().(*Config).Client() - for _, r := range s.RootModule().Resources { - if r.Type != "pagerduty_service" { + for _, rs := range s.RootModule().Resources { + if rs.Type != "pagerduty_service" { continue } - if _, _, err := client.Services.Get(r.Primary.ID, &pagerduty.GetServiceOptions{}); err == nil { + ctx := context.Background() + _, err := testAccProvider.client.GetServiceWithContext(ctx, rs.Primary.ID, &pagerduty.GetServiceOptions{}) + if err == nil { return fmt.Errorf("Service still exists") } @@ -1237,9 +1230,8 @@ func testAccCheckPagerDutyServiceExists(n string) resource.TestCheckFunc { return fmt.Errorf("No Service ID is set") } - client, _ := testAccProvider.Meta().(*Config).Client() - - found, _, err := client.Services.Get(rs.Primary.ID, &pagerduty.GetServiceOptions{}) + ctx := context.Background() + found, err := testAccProvider.client.GetServiceWithContext(ctx, rs.Primary.ID, &pagerduty.GetServiceOptions{}) if err != nil { return err } @@ -1263,9 +1255,8 @@ func testAccCheckPagerDutyServiceResponsePlayNotExist(n string) resource.TestChe return fmt.Errorf("No Service ID is set") } - client, _ := testAccProvider.Meta().(*Config).Client() - - found, _, err := client.Services.Get(rs.Primary.ID, &pagerduty.GetServiceOptions{}) + ctx := context.Background() + found, err := testAccProvider.client.GetServiceWithContext(ctx, rs.Primary.ID, &pagerduty.GetServiceOptions{}) if err != nil { return err } @@ -2407,3 +2398,21 @@ resource "pagerduty_service" "foo" { } `, username, email, escalationPolicy, responsePlay, service) } + +func TestFlattenAlertGroupingParameters_Basic(t *testing.T) { + var diags diag.Diagnostics + var timeout uint = 1000 + params := &pagerduty.AlertGroupingParameters{ + Type: "foo", + Config: &pagerduty.AlertGroupParamsConfig{ + Timeout: &timeout, + Aggregate: "aggregate", + Fields: []string{"a", "b", "c"}, + }, + } + li := flattenAlertGroupingParameters(context.Background(), params, &diags) + if diags.HasError() { + t.Fatalf("unexpected error %s", diags) + } + t.Logf("%#v", li) +} diff --git a/util/build.go b/util/build.go index 80bec4b65..d83150a56 100644 --- a/util/build.go +++ b/util/build.go @@ -10,9 +10,13 @@ import ( ) func StringToUintPointer(p path.Path, s 
types.String, diags *diag.Diagnostics) *uint { - if s.IsNull() || s.IsUnknown() || s.ValueString() == "" || s.ValueString() == "null" { + if s.IsNull() || s.IsUnknown() || s.ValueString() == "" { return nil } + if s.ValueString() == "null" { + v := uint(0) + return &v + } if val, err := strconv.Atoi(s.ValueString()); err == nil { uintvalue := uint(val) return &uintvalue diff --git a/util/validate/require_a_if_b_equal.go b/util/validate/require_a_if_b_equal.go index df03c7d55..4ad8d4e95 100644 --- a/util/validate/require_a_if_b_equal.go +++ b/util/validate/require_a_if_b_equal.go @@ -44,8 +44,7 @@ func (v *requireIfEqual) ValidateResource(ctx context.Context, req resource.Vali if resp.Diagnostics.HasError() { return } - - if dst.IsNull() || dst.IsUnknown() { + if dst.IsNull() { resp.Diagnostics.AddAttributeError( v.dst, fmt.Sprintf("Required %s", v.dst), diff --git a/util/validator.go b/util/validator.go new file mode 100644 index 000000000..1db899498 --- /dev/null +++ b/util/validator.go @@ -0,0 +1,63 @@ +package util + +import ( + "strings" + "unicode" + + "github.com/hashicorp/go-cty/cty" + v2diag "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// ValidateIsAllowedString will always validate if string provided is not empty, +// neither has trailing white spaces. Additionally the string content validation +// will be done based on the `mode` set. +// +// mode: NoContentValidation | NoNonPrintableChars | NoNonPrintableCharsOrSpecialChars +func ReValidateIsAllowedString(mode StringContentValidationMode) schema.SchemaValidateDiagFunc { + return func(v interface{}, p cty.Path) v2diag.Diagnostics { + var diags v2diag.Diagnostics + + fillDiags := func() { + summary := "Name can not be blank. Trailing white spaces are not allowed either." + switch mode { + case NoNonPrintableChars: + summary = "Name can not be blank, nor contain non-printable characters. Trailing white spaces are not allowed either." + case NoNonPrintableCharsOrSpecialChars: + summary = "Name can not be blank, nor contain the characters '\\', '/', '&', '<', '>', or any non-printable characters. Trailing white spaces are not allowed either." + } + diags = append(diags, v2diag.Diagnostic{ + Severity: v2diag.Error, + Summary: summary, + AttributePath: p, + }) + } + + value := v.(string) + if value == "" { + fillDiags() + return diags + } + + for _, char := range value { + if (mode == NoNonPrintableChars || mode == NoNonPrintableCharsOrSpecialChars) && !unicode.IsPrint(char) { + fillDiags() + return diags + } + if mode == NoNonPrintableCharsOrSpecialChars { + switch char { + case '\\', '/', '&', '<', '>': + fillDiags() + return diags + } + } + } + + if strings.HasSuffix(value, " ") { + fillDiags() + return diags + } + + return diags + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/all.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/all.go new file mode 100644 index 000000000..a0ada2099 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/all.go @@ -0,0 +1,57 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// All returns a validator which ensures that any configured attribute value +// attribute value validates against all the given validators. 
+// +// Use of All is only necessary when used in conjunction with Any or AnyWithAllWarnings +// as the Validators field automatically applies a logical AND. +func All(validators ...validator.List) validator.List { + return allValidator{ + validators: validators, + } +} + +var _ validator.List = allValidator{} + +// allValidator implements the validator. +type allValidator struct { + validators []validator.List +} + +// Description describes the validation in plain text formatting. +func (v allValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, subValidator := range v.validators { + descriptions = append(descriptions, subValidator.Description(ctx)) + } + + return fmt.Sprintf("Value must satisfy all of the validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v allValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. +func (v allValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + for _, subValidator := range v.validators { + validateResp := &validator.ListResponse{} + + subValidator.ValidateList(ctx, req, validateResp) + + resp.Diagnostics.Append(validateResp.Diagnostics...) + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/also_requires.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/also_requires.go new file mode 100644 index 000000000..9a666c9e3 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/also_requires.go @@ -0,0 +1,26 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// AlsoRequires checks that a set of path.Expression has a non-null value, +// if the current attribute or block also has a non-null value. +// +// This implements the validation logic declaratively within the schema. +// Refer to [datasourcevalidator.RequiredTogether], +// [providervalidator.RequiredTogether], or [resourcevalidator.RequiredTogether] +// for declaring this type of validation outside the schema definition. +// +// Relative path.Expression will be resolved using the attribute or block +// being validated. +func AlsoRequires(expressions ...path.Expression) validator.List { + return schemavalidator.AlsoRequiresValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any.go new file mode 100644 index 000000000..2fbb5f388 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any.go @@ -0,0 +1,65 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// Any returns a validator which ensures that any configured attribute value +// passes at least one of the given validators. 
+// +// To prevent practitioner confusion should non-passing validators have +// conflicting logic, only warnings from the passing validator are returned. +// Use AnyWithAllWarnings() to return warnings from non-passing validators +// as well. +func Any(validators ...validator.List) validator.List { + return anyValidator{ + validators: validators, + } +} + +var _ validator.List = anyValidator{} + +// anyValidator implements the validator. +type anyValidator struct { + validators []validator.List +} + +// Description describes the validation in plain text formatting. +func (v anyValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, subValidator := range v.validators { + descriptions = append(descriptions, subValidator.Description(ctx)) + } + + return fmt.Sprintf("Value must satisfy at least one of the validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v anyValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. +func (v anyValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + for _, subValidator := range v.validators { + validateResp := &validator.ListResponse{} + + subValidator.ValidateList(ctx, req, validateResp) + + if !validateResp.Diagnostics.HasError() { + resp.Diagnostics = validateResp.Diagnostics + + return + } + + resp.Diagnostics.Append(validateResp.Diagnostics...) + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any_with_all_warnings.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any_with_all_warnings.go new file mode 100644 index 000000000..de9ead9a0 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any_with_all_warnings.go @@ -0,0 +1,67 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// AnyWithAllWarnings returns a validator which ensures that any configured +// attribute value passes at least one of the given validators. This validator +// returns all warnings, including failed validators. +// +// Use Any() to return warnings only from the passing validator. +func AnyWithAllWarnings(validators ...validator.List) validator.List { + return anyWithAllWarningsValidator{ + validators: validators, + } +} + +var _ validator.List = anyWithAllWarningsValidator{} + +// anyWithAllWarningsValidator implements the validator. +type anyWithAllWarningsValidator struct { + validators []validator.List +} + +// Description describes the validation in plain text formatting. +func (v anyWithAllWarningsValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, subValidator := range v.validators { + descriptions = append(descriptions, subValidator.Description(ctx)) + } + + return fmt.Sprintf("Value must satisfy at least one of the validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v anyWithAllWarningsValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. 
+func (v anyWithAllWarningsValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + anyValid := false + + for _, subValidator := range v.validators { + validateResp := &validator.ListResponse{} + + subValidator.ValidateList(ctx, req, validateResp) + + if !validateResp.Diagnostics.HasError() { + anyValid = true + } + + resp.Diagnostics.Append(validateResp.Diagnostics...) + } + + if anyValid { + resp.Diagnostics = resp.Diagnostics.Warnings() + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/at_least_one_of.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/at_least_one_of.go new file mode 100644 index 000000000..2de2fbb07 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/at_least_one_of.go @@ -0,0 +1,27 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// AtLeastOneOf checks that of a set of path.Expression, +// including the attribute or block this validator is applied to, +// at least one has a non-null value. +// +// This implements the validation logic declaratively within the tfsdk.Schema. +// Refer to [datasourcevalidator.AtLeastOneOf], +// [providervalidator.AtLeastOneOf], or [resourcevalidator.AtLeastOneOf] +// for declaring this type of validation outside the schema definition. +// +// Any relative path.Expression will be resolved using the attribute or block +// being validated. +func AtLeastOneOf(expressions ...path.Expression) validator.List { + return schemavalidator.AtLeastOneOfValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/conflicts_with.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/conflicts_with.go new file mode 100644 index 000000000..a8f35d068 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/conflicts_with.go @@ -0,0 +1,27 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// ConflictsWith checks that a set of path.Expression, +// including the attribute or block the validator is applied to, +// do not have a value simultaneously. +// +// This implements the validation logic declaratively within the schema. +// Refer to [datasourcevalidator.Conflicting], +// [providervalidator.Conflicting], or [resourcevalidator.Conflicting] +// for declaring this type of validation outside the schema definition. +// +// Relative path.Expression will be resolved using the attribute or block +// being validated. 
+func ConflictsWith(expressions ...path.Expression) validator.List { + return schemavalidator.ConflictsWithValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/doc.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/doc.go new file mode 100644 index 000000000..a13b37615 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/doc.go @@ -0,0 +1,5 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// Package listvalidator provides validators for types.List attributes. +package listvalidator diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/exactly_one_of.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/exactly_one_of.go new file mode 100644 index 000000000..25fa59bf3 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/exactly_one_of.go @@ -0,0 +1,28 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// ExactlyOneOf checks that of a set of path.Expression, +// including the attribute or block the validator is applied to, +// one and only one attribute has a value. +// It will also cause a validation error if none are specified. +// +// This implements the validation logic declaratively within the schema. +// Refer to [datasourcevalidator.ExactlyOneOf], +// [providervalidator.ExactlyOneOf], or [resourcevalidator.ExactlyOneOf] +// for declaring this type of validation outside the schema definition. +// +// Relative path.Expression will be resolved using the attribute or block +// being validated. +func ExactlyOneOf(expressions ...path.Expression) validator.List { + return schemavalidator.ExactlyOneOfValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/is_required.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/is_required.go new file mode 100644 index 000000000..c4f8a6f97 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/is_required.go @@ -0,0 +1,44 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = isRequiredValidator{} + +// isRequiredValidator validates that a list has a configuration value. +type isRequiredValidator struct{} + +// Description describes the validation in plain text formatting. +func (v isRequiredValidator) Description(_ context.Context) string { + return "must have a configuration value as the provider has marked it as required" +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v isRequiredValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. 
+func (v isRequiredValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() { + resp.Diagnostics.Append(validatordiag.InvalidBlockDiagnostic( + req.Path, + v.Description(ctx), + )) + } +} + +// IsRequired returns a validator which ensures that any configured list has a value (not null). +// +// This validator is equivalent to the `Required` field on attributes and is only +// practical for use with `schema.ListNestedBlock` +func IsRequired() validator.List { + return isRequiredValidator{} +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_least.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_least.go new file mode 100644 index 000000000..bfe35e7d1 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_least.go @@ -0,0 +1,59 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = sizeAtLeastValidator{} + +// sizeAtLeastValidator validates that list contains at least min elements. +type sizeAtLeastValidator struct { + min int +} + +// Description describes the validation in plain text formatting. +func (v sizeAtLeastValidator) Description(_ context.Context) string { + return fmt.Sprintf("list must contain at least %d elements", v.min) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v sizeAtLeastValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. +func (v sizeAtLeastValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elems := req.ConfigValue.Elements() + + if len(elems) < v.min { + resp.Diagnostics.Append(validatordiag.InvalidAttributeValueDiagnostic( + req.Path, + v.Description(ctx), + fmt.Sprintf("%d", len(elems)), + )) + } +} + +// SizeAtLeast returns an AttributeValidator which ensures that any configured +// attribute value: +// +// - Is a List. +// - Contains at least min elements. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func SizeAtLeast(min int) validator.List { + return sizeAtLeastValidator{ + min: min, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_most.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_most.go new file mode 100644 index 000000000..f3e7b36d8 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_most.go @@ -0,0 +1,59 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = sizeAtMostValidator{} + +// sizeAtMostValidator validates that list contains at most max elements. +type sizeAtMostValidator struct { + max int +} + +// Description describes the validation in plain text formatting. 
+func (v sizeAtMostValidator) Description(_ context.Context) string { + return fmt.Sprintf("list must contain at most %d elements", v.max) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v sizeAtMostValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. +func (v sizeAtMostValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elems := req.ConfigValue.Elements() + + if len(elems) > v.max { + resp.Diagnostics.Append(validatordiag.InvalidAttributeValueDiagnostic( + req.Path, + v.Description(ctx), + fmt.Sprintf("%d", len(elems)), + )) + } +} + +// SizeAtMost returns an AttributeValidator which ensures that any configured +// attribute value: +// +// - Is a List. +// - Contains at most max elements. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func SizeAtMost(max int) validator.List { + return sizeAtMostValidator{ + max: max, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_between.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_between.go new file mode 100644 index 000000000..32c34d9e6 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_between.go @@ -0,0 +1,62 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = sizeBetweenValidator{} + +// sizeBetweenValidator validates that list contains at least min elements +// and at most max elements. +type sizeBetweenValidator struct { + min int + max int +} + +// Description describes the validation in plain text formatting. +func (v sizeBetweenValidator) Description(_ context.Context) string { + return fmt.Sprintf("list must contain at least %d elements and at most %d elements", v.min, v.max) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v sizeBetweenValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. +func (v sizeBetweenValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elems := req.ConfigValue.Elements() + + if len(elems) < v.min || len(elems) > v.max { + resp.Diagnostics.Append(validatordiag.InvalidAttributeValueDiagnostic( + req.Path, + v.Description(ctx), + fmt.Sprintf("%d", len(elems)), + )) + } +} + +// SizeBetween returns an AttributeValidator which ensures that any configured +// attribute value: +// +// - Is a List. +// - Contains at least min elements and at most max elements. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. 
+func SizeBetween(min, max int) validator.List { + return sizeBetweenValidator{ + min: min, + max: max, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/unique_values.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/unique_values.go new file mode 100644 index 000000000..6cfc3b73a --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/unique_values.go @@ -0,0 +1,68 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = uniqueValuesValidator{} + +// uniqueValuesValidator implements the validator. +type uniqueValuesValidator struct{} + +// Description returns the plaintext description of the validator. +func (v uniqueValuesValidator) Description(_ context.Context) string { + return "all values must be unique" +} + +// MarkdownDescription returns the Markdown description of the validator. +func (v uniqueValuesValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList implements the validation logic. +func (v uniqueValuesValidator) ValidateList(_ context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elements := req.ConfigValue.Elements() + + for indexOuter, elementOuter := range elements { + // Only evaluate known values for duplicates. + if elementOuter.IsUnknown() { + continue + } + + for indexInner := indexOuter + 1; indexInner < len(elements); indexInner++ { + elementInner := elements[indexInner] + + if elementInner.IsUnknown() { + continue + } + + if !elementInner.Equal(elementOuter) { + continue + } + + resp.Diagnostics.AddAttributeError( + req.Path, + "Duplicate List Value", + fmt.Sprintf("This attribute contains duplicate values of: %s", elementInner), + ) + } + } +} + +// UniqueValues returns a validator which ensures that any configured list +// only contains unique values. This is similar to using a set attribute type +// which inherently validates unique values, but with list ordering semantics. +// Null (unconfigured) and unknown (known after apply) values are skipped. +func UniqueValues() validator.List { + return uniqueValuesValidator{} +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_float64s_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_float64s_are.go new file mode 100644 index 000000000..708e08781 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_float64s_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueFloat64sAre returns an validator which ensures that any configured +// Float64 values passes each Float64 validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. 
+func ValueFloat64sAre(elementValidators ...validator.Float64) validator.List { + return valueFloat64sAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueFloat64sAreValidator{} + +// valueFloat64sAreValidator validates that each Float64 member validates against each of the value validators. +type valueFloat64sAreValidator struct { + elementValidators []validator.Float64 +} + +// Description describes the validation in plain text formatting. +func (v valueFloat64sAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueFloat64sAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateFloat64 performs the validation. +func (v valueFloat64sAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.Float64Typable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Float64 values validator, however its values do not implement types.Float64Type or the types.Float64Typable interface for custom Float64 types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.Float64Valuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Float64 values validator, however its values do not implement types.Float64Type or the types.Float64Typable interface for custom Float64 types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToFloat64Value(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. 
+ if diags.HasError() { + return + } + + elementReq := validator.Float64Request{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.Float64Response{} + + elementValidator.ValidateFloat64(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_int64s_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_int64s_are.go new file mode 100644 index 000000000..6cdc0ce05 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_int64s_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueInt64sAre returns an validator which ensures that any configured +// Int64 values passes each Int64 validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueInt64sAre(elementValidators ...validator.Int64) validator.List { + return valueInt64sAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueInt64sAreValidator{} + +// valueInt64sAreValidator validates that each Int64 member validates against each of the value validators. +type valueInt64sAreValidator struct { + elementValidators []validator.Int64 +} + +// Description describes the validation in plain text formatting. +func (v valueInt64sAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueInt64sAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateInt64 performs the validation. +func (v valueInt64sAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.Int64Typable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Int64 values validator, however its values do not implement types.Int64Type or the types.Int64Typable interface for custom Int64 types. "+ + "Use the appropriate values validator that matches the element type. 
"+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.Int64Valuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Int64 values validator, however its values do not implement types.Int64Type or the types.Int64Typable interface for custom Int64 types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToInt64Value(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.Int64Request{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.Int64Response{} + + elementValidator.ValidateInt64(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_lists_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_lists_are.go new file mode 100644 index 000000000..6ebf116d7 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_lists_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueListsAre returns an validator which ensures that any configured +// List values passes each List validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueListsAre(elementValidators ...validator.List) validator.List { + return valueListsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueListsAreValidator{} + +// valueListsAreValidator validates that each List member validates against each of the value validators. +type valueListsAreValidator struct { + elementValidators []validator.List +} + +// Description describes the validation in plain text formatting. 
+func (v valueListsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueListsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateSet performs the validation. +func (v valueListsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.ListTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a List values validator, however its values do not implement types.ListType or the types.ListTypable interface for custom List types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.ListValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a List values validator, however its values do not implement types.ListType or the types.ListTypable interface for custom List types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToListValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.ListRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.ListResponse{} + + elementValidator.ValidateList(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) 
+ } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_maps_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_maps_are.go new file mode 100644 index 000000000..ececd13cc --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_maps_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueMapsAre returns an validator which ensures that any configured +// Map values passes each Map validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueMapsAre(elementValidators ...validator.Map) validator.List { + return valueMapsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueMapsAreValidator{} + +// valueMapsAreValidator validates that each Map member validates against each of the value validators. +type valueMapsAreValidator struct { + elementValidators []validator.Map +} + +// Description describes the validation in plain text formatting. +func (v valueMapsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueMapsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateMap performs the validation. +func (v valueMapsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.MapTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Map values validator, however its values do not implement types.MapType or the types.MapTypable interface for custom Map types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.MapValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Map values validator, however its values do not implement types.MapType or the types.MapTypable interface for custom Map types. 
"+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToMapValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.MapRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.MapResponse{} + + elementValidator.ValidateMap(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_numbers_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_numbers_are.go new file mode 100644 index 000000000..7e75e98e1 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_numbers_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueNumbersAre returns an validator which ensures that any configured +// Number values passes each Number validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueNumbersAre(elementValidators ...validator.Number) validator.List { + return valueNumbersAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueNumbersAreValidator{} + +// valueNumbersAreValidator validates that each Number member validates against each of the value validators. +type valueNumbersAreValidator struct { + elementValidators []validator.Number +} + +// Description describes the validation in plain text formatting. +func (v valueNumbersAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueNumbersAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateNumber performs the validation. +func (v valueNumbersAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.NumberTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Number values validator, however its values do not implement types.NumberType or the types.NumberTypable interface for custom Number types. 
"+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.NumberValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Number values validator, however its values do not implement types.NumberType or the types.NumberTypable interface for custom Number types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToNumberValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.NumberRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.NumberResponse{} + + elementValidator.ValidateNumber(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_sets_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_sets_are.go new file mode 100644 index 000000000..9f05ae117 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_sets_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueSetsAre returns an validator which ensures that any configured +// Set values passes each Set validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueSetsAre(elementValidators ...validator.Set) validator.List { + return valueSetsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueSetsAreValidator{} + +// valueSetsAreValidator validates that each set member validates against each of the value validators. +type valueSetsAreValidator struct { + elementValidators []validator.Set +} + +// Description describes the validation in plain text formatting. 
+func (v valueSetsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueSetsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateSet performs the validation. +func (v valueSetsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.SetTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Set values validator, however its values do not implement types.SetType or the types.SetTypable interface for custom Set types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.SetValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Set values validator, however its values do not implement types.SetType or the types.SetTypable interface for custom Set types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToSetValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.SetRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.SetResponse{} + + elementValidator.ValidateSet(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) 
+ } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_strings_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_strings_are.go new file mode 100644 index 000000000..ead85b52d --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_strings_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueStringsAre returns an validator which ensures that any configured +// String values passes each String validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueStringsAre(elementValidators ...validator.String) validator.List { + return valueStringsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueStringsAreValidator{} + +// valueStringsAreValidator validates that each List member validates against each of the value validators. +type valueStringsAreValidator struct { + elementValidators []validator.String +} + +// Description describes the validation in plain text formatting. +func (v valueStringsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueStringsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. +func (v valueStringsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.StringTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a String values validator, however its values do not implement types.StringType or the types.StringTypable interface for custom String types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.StringValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. 
"+ + "The attribute declares a String values validator, however its values do not implement types.StringType or the types.StringTypable interface for custom String types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToStringValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.StringRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.StringResponse{} + + elementValidator.ValidateString(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/doc.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/doc.go new file mode 100644 index 000000000..22fa0a61e --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/doc.go @@ -0,0 +1,5 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// Package listplanmodifier provides plan modifiers for types.List attributes. +package listplanmodifier diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace.go new file mode 100644 index 000000000..eecf57bb4 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace.go @@ -0,0 +1,30 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplace returns a plan modifier that conditionally requires +// resource replacement if: +// +// - The resource is planned for update. +// - The plan and state values are not equal. +// +// Use RequiresReplaceIfConfigured if the resource replacement should +// only occur if there is a configuration value (ignore unconfigured drift +// detection changes). Use RequiresReplaceIf if the resource replacement +// should check provider-defined conditional logic. 
+func RequiresReplace() planmodifier.List { + return RequiresReplaceIf( + func(_ context.Context, _ planmodifier.ListRequest, resp *RequiresReplaceIfFuncResponse) { + resp.RequiresReplace = true + }, + "If the value of this attribute changes, Terraform will destroy and recreate the resource.", + "If the value of this attribute changes, Terraform will destroy and recreate the resource.", + ) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if.go new file mode 100644 index 000000000..840c5223b --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if.go @@ -0,0 +1,73 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplaceIf returns a plan modifier that conditionally requires +// resource replacement if: +// +// - The resource is planned for update. +// - The plan and state values are not equal. +// - The given function returns true. Returning false will not unset any +// prior resource replacement. +// +// Use RequiresReplace if the resource replacement should always occur on value +// changes. Use RequiresReplaceIfConfigured if the resource replacement should +// occur on value changes, but only if there is a configuration value (ignore +// unconfigured drift detection changes). +func RequiresReplaceIf(f RequiresReplaceIfFunc, description, markdownDescription string) planmodifier.List { + return requiresReplaceIfModifier{ + ifFunc: f, + description: description, + markdownDescription: markdownDescription, + } +} + +// requiresReplaceIfModifier is an plan modifier that sets RequiresReplace +// on the attribute if a given function is true. +type requiresReplaceIfModifier struct { + ifFunc RequiresReplaceIfFunc + description string + markdownDescription string +} + +// Description returns a human-readable description of the plan modifier. +func (m requiresReplaceIfModifier) Description(_ context.Context) string { + return m.description +} + +// MarkdownDescription returns a markdown description of the plan modifier. +func (m requiresReplaceIfModifier) MarkdownDescription(_ context.Context) string { + return m.markdownDescription +} + +// PlanModifyList implements the plan modification logic. +func (m requiresReplaceIfModifier) PlanModifyList(ctx context.Context, req planmodifier.ListRequest, resp *planmodifier.ListResponse) { + // Do not replace on resource creation. + if req.State.Raw.IsNull() { + return + } + + // Do not replace on resource destroy. + if req.Plan.Raw.IsNull() { + return + } + + // Do not replace if the plan and state values are equal. + if req.PlanValue.Equal(req.StateValue) { + return + } + + ifFuncResp := &RequiresReplaceIfFuncResponse{} + + m.ifFunc(ctx, req, ifFuncResp) + + resp.Diagnostics.Append(ifFuncResp.Diagnostics...) 
+ resp.RequiresReplace = ifFuncResp.RequiresReplace +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_configured.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_configured.go new file mode 100644 index 000000000..81ffdb3d1 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_configured.go @@ -0,0 +1,34 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplaceIfConfigured returns a plan modifier that conditionally requires +// resource replacement if: +// +// - The resource is planned for update. +// - The plan and state values are not equal. +// - The configuration value is not null. +// +// Use RequiresReplace if the resource replacement should occur regardless of +// the presence of a configuration value. Use RequiresReplaceIf if the resource +// replacement should check provider-defined conditional logic. +func RequiresReplaceIfConfigured() planmodifier.List { + return RequiresReplaceIf( + func(_ context.Context, req planmodifier.ListRequest, resp *RequiresReplaceIfFuncResponse) { + if req.ConfigValue.IsNull() { + return + } + + resp.RequiresReplace = true + }, + "If the value of this attribute is configured and changes, Terraform will destroy and recreate the resource.", + "If the value of this attribute is configured and changes, Terraform will destroy and recreate the resource.", + ) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_func.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_func.go new file mode 100644 index 000000000..e6dabd6c2 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_func.go @@ -0,0 +1,25 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplaceIfFunc is a conditional function used in the RequiresReplaceIf +// plan modifier to determine whether the attribute requires replacement. +type RequiresReplaceIfFunc func(context.Context, planmodifier.ListRequest, *RequiresReplaceIfFuncResponse) + +// RequiresReplaceIfFuncResponse is the response type for a RequiresReplaceIfFunc. +type RequiresReplaceIfFuncResponse struct { + // Diagnostics report errors or warnings related to this logic. An empty + // or unset slice indicates success, with no warnings or errors generated. + Diagnostics diag.Diagnostics + + // RequiresReplace should be enabled if the resource should be replaced. 
+ RequiresReplace bool +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/use_state_for_unknown.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/use_state_for_unknown.go new file mode 100644 index 000000000..c8b2f3bf5 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/use_state_for_unknown.go @@ -0,0 +1,55 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// UseStateForUnknown returns a plan modifier that copies a known prior state +// value into the planned value. Use this when it is known that an unconfigured +// value will remain the same after a resource update. +// +// To prevent Terraform errors, the framework automatically sets unconfigured +// and Computed attributes to an unknown value "(known after apply)" on update. +// Using this plan modifier will instead display the prior state value in the +// plan, unless a prior plan modifier adjusts the value. +func UseStateForUnknown() planmodifier.List { + return useStateForUnknownModifier{} +} + +// useStateForUnknownModifier implements the plan modifier. +type useStateForUnknownModifier struct{} + +// Description returns a human-readable description of the plan modifier. +func (m useStateForUnknownModifier) Description(_ context.Context) string { + return "Once set, the value of this attribute in state will not change." +} + +// MarkdownDescription returns a markdown description of the plan modifier. +func (m useStateForUnknownModifier) MarkdownDescription(_ context.Context) string { + return "Once set, the value of this attribute in state will not change." +} + +// PlanModifyList implements the plan modification logic. +func (m useStateForUnknownModifier) PlanModifyList(_ context.Context, req planmodifier.ListRequest, resp *planmodifier.ListResponse) { + // Do nothing if there is no state value. + if req.StateValue.IsNull() { + return + } + + // Do nothing if there is a known planned value. + if !req.PlanValue.IsUnknown() { + return + } + + // Do nothing if there is an unknown configuration value, otherwise interpolation gets messed up. 
+ if req.ConfigValue.IsUnknown() { + return + } + + resp.PlanValue = req.StateValue +} diff --git a/vendor/modules.txt b/vendor/modules.txt index a66e2d30c..5cff09f01 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -165,6 +165,7 @@ github.com/hashicorp/terraform-plugin-framework/providerserver github.com/hashicorp/terraform-plugin-framework/resource github.com/hashicorp/terraform-plugin-framework/resource/schema github.com/hashicorp/terraform-plugin-framework/resource/schema/defaults +github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault @@ -180,6 +181,7 @@ github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes ## explicit; go 1.19 github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator +github.com/hashicorp/terraform-plugin-framework-validators/listvalidator github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator # github.com/hashicorp/terraform-plugin-go v0.20.0 ## explicit; go 1.20 From 45380548e68b5abd41cc1a9e9c3468095e0e5555 Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Wed, 8 May 2024 12:49:40 -0400 Subject: [PATCH 04/11] Remove omitempty from ScheduleActions in vendor/ --- vendor/github.com/PagerDuty/go-pagerduty/service.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/github.com/PagerDuty/go-pagerduty/service.go b/vendor/github.com/PagerDuty/go-pagerduty/service.go index 497b4c1dd..1af589982 100644 --- a/vendor/github.com/PagerDuty/go-pagerduty/service.go +++ b/vendor/github.com/PagerDuty/go-pagerduty/service.go @@ -90,7 +90,7 @@ type Service struct { Teams []Team `json:"teams,omitempty"` IncidentUrgencyRule *IncidentUrgencyRule `json:"incident_urgency_rule,omitempty"` SupportHours *SupportHours `json:"support_hours,omitempty"` - ScheduledActions []ScheduledAction `json:"scheduled_actions,omitempty"` + ScheduledActions []ScheduledAction `json:"scheduled_actions"` AlertCreation string `json:"alert_creation,omitempty"` AlertGrouping string `json:"alert_grouping,omitempty"` AlertGroupingTimeout *uint `json:"alert_grouping_timeout,omitempty"` From a00874ff61d72d56a876179cb8b2471457696ff8 Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Fri, 10 May 2024 06:27:10 -0400 Subject: [PATCH 05/11] wip --- pagerdutyplugin/provider_test.go | 14 - pagerdutyplugin/resource_pagerduty_service.go | 234 ++- .../resource_pagerduty_service_test.go | 1589 ++++++++--------- util/rangetypes/int64.go | 3 +- util/validate/require_a_if_b_equal.go | 36 +- .../PagerDuty/go-pagerduty/service.go | 4 +- 6 files changed, 981 insertions(+), 899 deletions(-) diff --git a/pagerdutyplugin/provider_test.go b/pagerdutyplugin/provider_test.go index f562e13bb..ea839e2b2 100644 --- a/pagerdutyplugin/provider_test.go +++ b/pagerdutyplugin/provider_test.go @@ -84,17 +84,3 @@ func testAccTimeNow() time.Time { } return util.TimeNowInLoc(name) } - -func testAccPreCheckPagerDutyAbility(t *testing.T, ability string) { - if v := os.Getenv("PAGERDUTY_TOKEN"); v == "" { - t.Fatal("PAGERDUTY_TOKEN must be set for acceptance tests") - } - if v := os.Getenv("PAGERDUTY_USER_TOKEN"); v == "" { - t.Fatal("PAGERDUTY_USER_TOKEN must be set for acceptance tests") - } - - ctx := context.Background() - 
if err := testAccProvider.client.TestAbilityWithContext(ctx, ability); err != nil { - t.Skipf("Missing ability: %s. Skipping test", ability) - } -} diff --git a/pagerdutyplugin/resource_pagerduty_service.go b/pagerdutyplugin/resource_pagerduty_service.go index 5d852cac8..f5ee15a1d 100644 --- a/pagerdutyplugin/resource_pagerduty_service.go +++ b/pagerdutyplugin/resource_pagerduty_service.go @@ -10,7 +10,6 @@ import ( "github.com/PagerDuty/go-pagerduty" "github.com/PagerDuty/terraform-provider-pagerduty/util" "github.com/PagerDuty/terraform-provider-pagerduty/util/enumtypes" - "github.com/PagerDuty/terraform-provider-pagerduty/util/rangetypes" "github.com/PagerDuty/terraform-provider-pagerduty/util/tztypes" "github.com/PagerDuty/terraform-provider-pagerduty/util/validate" "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" @@ -37,35 +36,9 @@ var ( _ resource.ResourceWithConfigure = (*resourceService)(nil) _ resource.ResourceWithConfigValidators = (*resourceService)(nil) _ resource.ResourceWithImportState = (*resourceService)(nil) + _ resource.ResourceWithValidateConfig = (*resourceService)(nil) ) -func (r *resourceService) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) -} - -func (r *resourceService) ConfigValidators(ctx context.Context) []resource.ConfigValidator { - return []resource.ConfigValidator{ - validate.Require( - path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), - ), - validate.RequireAIfBEqual( - path.Root("support_hours"), - path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), - types.StringValue("use_support_hours"), - ), - validate.ForbidAIfBEqualWithMessage( - path.Root("incident_urgency_rule").AtListIndex(0).AtName("urgency"), - path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), - types.StringValue("use_support_hours"), - "general urgency cannot be set for a use_support_hours incident urgency rule type", - ), - validate.RequireList(path.Root("alert_grouping_parameters").AtListIndex(0).AtName("config")), - validate.RequireList(path.Root("incident_urgency_rule").AtListIndex(0).AtName("during_support_hours")), - validate.RequireList(path.Root("incident_urgency_rule").AtListIndex(0).AtName("outside_support_hours")), - validate.RequireList(path.Root("support_hours").AtListIndex(0).AtName("days_of_week")), // TODO at most 7 - } -} - func (r *resourceService) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = "pagerduty_service" } @@ -74,10 +47,8 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, + Computed: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, "name": schema.StringAttribute{ @@ -91,9 +62,10 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest }, "acknowledgement_timeout": schema.StringAttribute{ - Computed: true, - Optional: true, - Default: stringdefault.StaticString("1800"), + Computed: true, + Optional: true, + Default: stringdefault.StaticString("1800"), + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, "alert_creation": schema.StringAttribute{ @@ -103,6 +75,7 @@ func 
(r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest Validators: []validator.String{ stringvalidator.OneOf("create_alerts_and_incidents", "create_incidents"), }, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, "alert_grouping": schema.StringAttribute{ @@ -113,6 +86,7 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest stringvalidator.ConflictsWith(path.MatchRoot("alert_grouping_parameters")), }, DeprecationMessage: "Use `alert_grouping_parameters.type`", + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, "alert_grouping_timeout": schema.StringAttribute{ @@ -122,27 +96,52 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest Validators: []validator.String{ stringvalidator.ConflictsWith(path.MatchRoot("alert_grouping_parameters")), }, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, "auto_resolve_timeout": schema.StringAttribute{ - Computed: true, - Optional: true, - Default: stringdefault.StaticString("14400"), + Computed: true, + Optional: true, + Default: stringdefault.StaticString("14400"), + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, "description": schema.StringAttribute{ - Optional: true, - Computed: true, - Default: stringdefault.StaticString("Managed by Terraform"), + Optional: true, + Computed: true, + Default: stringdefault.StaticString("Managed by Terraform"), + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, - "created_at": schema.StringAttribute{Computed: true}, - "escalation_policy": schema.StringAttribute{Required: true}, - "html_url": schema.StringAttribute{Computed: true}, - "last_incident_timestamp": schema.StringAttribute{Computed: true}, - "response_play": schema.StringAttribute{Computed: true, Optional: true}, - "status": schema.StringAttribute{Computed: true}, - "type": schema.StringAttribute{Computed: true}, + "created_at": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, + "escalation_policy": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, + "html_url": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, + "last_incident_timestamp": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, + "response_play": schema.StringAttribute{ + Computed: true, + Optional: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, + "status": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, + "type": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, "alert_grouping_parameters": schema.ListAttribute{ Optional: true, @@ -161,12 +160,13 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest "timeout": types.Int64Type, "fields": types.ListType{ElemType: types.StringType}, "aggregate": alertGroupingParametersConfigAggregateType, - "time_window": alertGroupingParametersConfigTimeWindowType, + "time_window": types.Int64Type, }, }, }, }, }, + PlanModifiers: []planmodifier.List{listplanmodifier.UseStateForUnknown()}, 
}, "auto_pause_notifications_parameters": schema.ListAttribute{ @@ -182,6 +182,7 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest "timeout": autoPauseNotificationsParametersTimeoutType, }, }, + PlanModifiers: []planmodifier.List{listplanmodifier.UseStateForUnknown()}, }, "incident_urgency_rule": schema.ListAttribute{ @@ -213,6 +214,7 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest }, }, }, + PlanModifiers: []planmodifier.List{listplanmodifier.UseStateForUnknown()}, }, "scheduled_actions": schema.ListAttribute{ @@ -234,6 +236,7 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest }, PlanModifiers: []planmodifier.List{ listplanmodifier.RequiresReplace(), + listplanmodifier.UseStateForUnknown(), }, Validators: []validator.List{ listvalidator.SizeBetween(1, 1), @@ -248,6 +251,7 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest }, PlanModifiers: []planmodifier.List{ listplanmodifier.RequiresReplace(), + listplanmodifier.UseStateForUnknown(), }, ElementType: types.ObjectType{ AttrTypes: map[string]attr.Type{ @@ -265,8 +269,81 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest } } -func (r *resourceService) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +func (r *resourceService) ConfigValidators(ctx context.Context) []resource.ConfigValidator { + return []resource.ConfigValidator{ + validate.Require( + path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), + ), + validate.RequireAIfBEqual( + path.Root("support_hours"), + path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), + types.StringValue("use_support_hours"), + ), + validate.ForbidAIfBEqualWithMessage( + path.Root("incident_urgency_rule").AtListIndex(0).AtName("urgency"), + path.Root("incident_urgency_rule").AtListIndex(0).AtName("type"), + types.StringValue("use_support_hours"), + "general urgency cannot be set for a use_support_hours incident urgency rule type", + ), + validate.RequireList(path.Root("alert_grouping_parameters").AtListIndex(0).AtName("config")), + validate.RequireList(path.Root("incident_urgency_rule").AtListIndex(0).AtName("during_support_hours")), + validate.RequireList(path.Root("incident_urgency_rule").AtListIndex(0).AtName("outside_support_hours")), + validate.RequireList(path.Root("support_hours").AtListIndex(0).AtName("days_of_week")), // TODO at most 7 + } +} + +func (r *resourceService) ValidateConfig(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + configPath := path.Root("alert_grouping_parameters").AtListIndex(0).AtName("config").AtListIndex(0) + + // Validate time window + var timeWindow types.Int64 + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, configPath.AtName("time_window"), &timeWindow)...) 
+ if resp.Diagnostics.HasError() { + return + } + if !timeWindow.IsNull() && !timeWindow.IsUnknown() { + if tw := timeWindow.ValueInt64(); tw < 300 || tw > 3600 { + resp.Diagnostics.AddAttributeError( + configPath.AtName("time_window"), + "Alert grouping time window value must be between 300 and 3600", + fmt.Sprintf("Current setting is %d", tw), + ) + } + } + + // Validate Alert Grouping Parameters + var aggregate types.String + var fields types.List + var timeout types.Int64 + var pType types.String + + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, path.Root("alert_grouping_parameters").AtListIndex(0).AtName("type"), &pType)...) + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, configPath.AtName("aggregate"), &aggregate)...) + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, configPath.AtName("fields"), &fields)...) + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, configPath.AtName("timeout"), &timeout)...) + if resp.Diagnostics.HasError() { + return + } + + if pType.ValueString() == "content_based" && (aggregate.ValueString() == "" || len(fields.Elements()) == 0) { + resp.Diagnostics.AddError(`When using Alert grouping parameters configuration of type "content_based" is in use, attributes "aggregate" and "fields" are required`, "") + return + } + + if !pType.IsNull() && pType.ValueString() != "content_based" && (aggregate.ValueString() != "" || len(fields.Elements()) > 0) { + resp.Diagnostics.AddError(`Alert grouping parameters configuration attributes "aggregate" and "fields" are only supported by "content_based" type Alert Grouping`, "") + return + } + + if !pType.IsNull() && pType.ValueString() != "time" && timeout.ValueInt64() > 0 { + resp.Diagnostics.AddError(`Alert grouping parameters configuration attribute "timeout" is only supported by "time" type Alert Grouping`, "") + return + } + + if !pType.IsNull() && (pType.ValueString() != "intelligent" && pType.ValueString() != "content_based") && timeWindow.ValueInt64() > 300 { + resp.Diagnostics.AddError(`Alert grouping parameters configuration attribute "time_window" is only supported by "intelligent" and "content-based" type Alert Grouping`, "") + return + } } func (r *resourceService) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { @@ -417,6 +494,14 @@ func (r *resourceService) Delete(ctx context.Context, req resource.DeleteRequest resp.State.RemoveResource(ctx) } +func (r *resourceService) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) +} + +func (r *resourceService) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + type resourceServiceModel struct { ID types.String `tfsdk:"id"` AcknowledgementTimeout types.String `tfsdk:"acknowledgement_timeout"` @@ -496,6 +581,10 @@ func buildAlertGroupingParameters(ctx context.Context, list types.List, diags *d } func buildAlertGroupingConfig(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.AlertGroupParamsConfig { + if list.IsNull() || list.IsUnknown() { + return nil + } + var target []struct { Timeout types.Int64 `tfsdk:"timeout"` Aggregate types.String `tfsdk:"aggregate"` @@ -508,7 +597,8 @@ func buildAlertGroupingConfig(ctx context.Context, list types.List, diags *diag. 
} obj := target[0] - ut := uint(obj.Timeout.ValueInt64()) + timeout := uint(obj.Timeout.ValueInt64()) + timeWindow := uint(obj.TimeWindow.ValueInt64()) var fields []string if d := obj.Fields.ElementsAs(ctx, &fields, false); d.HasError() { @@ -517,9 +607,10 @@ func buildAlertGroupingConfig(ctx context.Context, list types.List, diags *diag. } return &pagerduty.AlertGroupParamsConfig{ - Timeout: &ut, - Aggregate: obj.Aggregate.ValueString(), - Fields: fields, + Timeout: &timeout, + Aggregate: obj.Aggregate.ValueString(), + Fields: fields, + TimeWindow: &timeWindow, } } @@ -544,9 +635,12 @@ func buildAutoPauseNotificationsParameters(ctx context.Context, list types.List, } func buildIncidentUrgencyRule(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.IncidentUrgencyRule { + defaultValue := &pagerduty.IncidentUrgencyRule{Type: "constant", Urgency: "high"} + if list.IsNull() || list.IsUnknown() { - return nil + return defaultValue } + var target []struct { Type types.String `tfsdk:"type"` Urgency types.String `tfsdk:"urgency"` @@ -555,16 +649,16 @@ func buildIncidentUrgencyRule(ctx context.Context, list types.List, diags *diag. } if d := list.ElementsAs(ctx, &target, false); d.HasError() { diags.Append(d...) - return nil + return defaultValue } obj := target[0] - incidentUrgencyRule := &pagerduty.IncidentUrgencyRule{ - Type: obj.Type.ValueString(), - Urgency: obj.Urgency.ValueString(), + + return &pagerduty.IncidentUrgencyRule{ + Type: obj.Type.ValueString(), + Urgency: obj.Urgency.ValueString(), + DuringSupportHours: buildIncidentUrgencyType(ctx, obj.DuringSupportHours, diags), + OutsideSupportHours: buildIncidentUrgencyType(ctx, obj.OutsideSupportHours, diags), } - incidentUrgencyRule.DuringSupportHours = buildIncidentUrgencyType(ctx, obj.DuringSupportHours, diags) - incidentUrgencyRule.OutsideSupportHours = buildIncidentUrgencyType(ctx, obj.OutsideSupportHours, diags) - return incidentUrgencyRule } func buildIncidentUrgencyType(ctx context.Context, list types.List, diags *diag.Diagnostics) *pagerduty.IncidentUrgencyType { @@ -673,8 +767,6 @@ var ( OneOf: []string{"time", "intelligent", "content_based"}} alertGroupingParametersConfigAggregateType = enumtypes.StringType{ OneOf: []string{"all", "any"}} - alertGroupingParametersConfigTimeWindowType = rangetypes.Int64Type{ - Start: 300, End: 3600} autoPauseNotificationsParametersTimeoutType = enumtypes.Int64Type{ OneOf: []int64{120, 180, 300, 600, 900}} ) @@ -736,7 +828,7 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert "aggregate": alertGroupingParametersConfigAggregateType, "fields": types.ListType{ElemType: types.StringType}, "timeout": types.Int64Type, - "time_window": alertGroupingParametersConfigTimeWindowType, + "time_window": types.Int64Type, }, } alertGroupingParametersObjectType := types.ObjectType{ @@ -752,6 +844,7 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert } configList := types.ListNull(alertGroupParamsConfigObjectType) + log.Printf("[CG] config %#v", params.Config) if params.Config != nil { fieldsList, d := types.ListValueFrom(ctx, types.StringType, params.Config.Fields) if d.HasError() { @@ -759,11 +852,16 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert return nullList } - var timeout types.Int64 + timeout := types.Int64Null() if params.Config.Timeout != nil { timeout = types.Int64Value(int64(*params.Config.Timeout)) } + timeWindow := types.Int64Null() + if params.Config.TimeWindow != nil { + 
timeWindow = types.Int64Value(int64(*params.Config.TimeWindow)) + } + aggregate := enumtypes.NewStringNull(alertGroupingParametersConfigAggregateType) if params.Config.Aggregate != "" { aggregate = enumtypes.NewStringValue(params.Config.Aggregate, alertGroupingParametersConfigAggregateType) @@ -773,7 +871,7 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert "aggregate": aggregate, "fields": fieldsList, "timeout": timeout, - "time_window": types.Int64Null(), // TODO + "time_window": timeWindow, }) if d.HasError() { diags.Append(d...) diff --git a/pagerdutyplugin/resource_pagerduty_service_test.go b/pagerdutyplugin/resource_pagerduty_service_test.go index 7950c0b3e..dbaf8dabc 100644 --- a/pagerdutyplugin/resource_pagerduty_service_test.go +++ b/pagerdutyplugin/resource_pagerduty_service_test.go @@ -135,182 +135,169 @@ func TestAccPagerDutyService_FormatValidation(t *testing.T) { ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ - // Just a valid name + // 1. Just a valid name { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, "DB Technical Service"), PlanOnly: true, ExpectNonEmptyPlan: true, }, - // Blank Name + // 2. Blank Name { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, ""), PlanOnly: true, ExpectError: regexp.MustCompile(errMessageMatcher), }, - // Name with one white space at the end + // 3. Name with one white space at the end { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, "this name has a white space at the end "), PlanOnly: true, ExpectError: regexp.MustCompile(errMessageMatcher), }, - // Name with multiple white space at the end + // 4. Name with multiple white space at the end { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, "this name has white spaces at the end "), PlanOnly: true, ExpectError: regexp.MustCompile(errMessageMatcher), }, - // Name with non printable characters + // 5. Name with non printable characters { Config: testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, "this name has a non printable\\n character"), PlanOnly: true, ExpectError: regexp.MustCompile(errMessageMatcher), }, - // Alert grouping parameters "Content Based" type input validation + // 6. Alert grouping parameters "Content Based" type input validation empty { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "content_based" - config {} - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "content_based" + config {} +}`, ), - PlanOnly: true, - ExpectError: regexp.MustCompile("When using Alert grouping parameters configuration of type \"content_based\" is in use, attributes \"aggregate\" and \"fields\" are required"), + PlanOnly: true, + ExpectError: regexp.MustCompile("When using Alert grouping parameters configuration of type \"content_based\" is" + + `\s+in use, attributes "aggregate" and "fields" are required`), }, + // 7. 
Alert grouping parameters "Content Based" type input valid { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "content_based" - config { - aggregate = "all" - fields = ["custom_details.source_id"] - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "content_based" + config { + aggregate = "all" + fields = ["custom_details.source_id"] + } +}`, ), }, + // 8. Alert grouping parameters in non "content_based" has "aggreate" or "fields" { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "time" - config { - aggregate = "all" - fields = ["custom_details.source_id"] - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "time" + config { + aggregate = "all" + fields = ["custom_details.source_id"] + } +}`, ), PlanOnly: true, ExpectError: regexp.MustCompile("Alert grouping parameters configuration attributes \"aggregate\" and \"fields\" are only supported by \"content_based\" type Alert Grouping"), }, - // Alert grouping parameters "time" type input validation + // 9. Alert grouping parameters "time" type input validation { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "time" - config { - timeout = 5 - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "time" + config { + timeout = 5 + } +}`, ), }, + // 10. Alert grouping parameters "intelligent" type input validation has bad "timeout" { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "intelligent" - config { - timeout = 5 - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "intelligent" + config { + timeout = 5 + } +}`, ), PlanOnly: true, ExpectError: regexp.MustCompile("Alert grouping parameters configuration attribute \"timeout\" is only supported by \"time\" type Alert Grouping"), }, - // Alert grouping parameters "intelligent" type input validation + // 11. Alert grouping parameters "intelligent" type input validation { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "time" - config { - time_window = 600 - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "time" + config { + time_window = 600 + } +}`, ), PlanOnly: true, ExpectError: regexp.MustCompile("Alert grouping parameters configuration attribute \"time_window\" is only supported by \"intelligent\" and \"content-based\" type Alert Grouping"), }, + // 12. 
{ - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "intelligent" - config {} - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "intelligent" + config {} +}`, ), }, + // 13. { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "intelligent" - config { - time_window = 5 - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "intelligent" + config { + time_window = 5 + } +}`, ), PlanOnly: true, ExpectError: regexp.MustCompile("Alert grouping time window value must be between 300 and 3600"), }, + // 14. { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "intelligent" - config { - time_window = 300 - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "intelligent" + config { + time_window = 300 + } +}`, ), PlanOnly: true, }, + // 15. { - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "content_based" - config { - time_window = 5 - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "content_based" + config { + time_window = 5 + } +}`, ), PlanOnly: true, ExpectError: regexp.MustCompile("Alert grouping time window value must be between 300 and 3600"), }, + // 16. 
{ - Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, - ` - alert_grouping_parameters { - type = "content_based" - config { - aggregate = "all" - fields = ["custom_details.source_id"] - time_window = 300 - } - } - `, + Config: testAccCheckPagerDutyServiceAlertGroupingInputValidationConfig(username, email, escalationPolicy, service, ` +alert_grouping_parameters { + type = "content_based" + config { + aggregate = "all" + fields = ["custom_details.source_id"] + time_window = 300 + } +}`, ), }, }, @@ -324,7 +311,8 @@ func TestAccPagerDutyService_AlertGrouping(t *testing.T) { service := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t); testAccPreCheckPagerDutyAbility(t, "preview_intelligent_alert_grouping") }, + // PreCheck: func() { testAccPreCheck(t); testAccPreCheckPagerDutyAbility(t, "preview_intelligent_alert_grouping") }, // TODO + PreCheck: func() { testAccPreCheck(t) }, ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), CheckDestroy: testAccCheckPagerDutyServiceDestroy, Steps: []resource.TestStep{ @@ -384,7 +372,7 @@ func TestAccPagerDutyService_AlertGrouping(t *testing.T) { }) } -func TestAccPagerDutyService_AlertContentGrouping(t *testing.T) { +func TestAccPagerDutyService_AlertContentGrouping_Basic(t *testing.T) { username := fmt.Sprintf("tf-%s", acctest.RandString(5)) email := fmt.Sprintf("%s@foo.test", username) escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) @@ -617,7 +605,7 @@ func TestAccPagerDutyService_AlertContentGrouping(t *testing.T) { }) } -func TestAccPagerDutyService_AlertContentGroupingIntelligentTimeWindow(t *testing.T) { +func TestAccPagerDutyService_AlertContentGrouping_IntelligentTimeWindow(t *testing.T) { username := fmt.Sprintf("tf-%s", acctest.RandString(5)) email := fmt.Sprintf("%s@foo.test", username) escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) @@ -1272,33 +1260,33 @@ func testAccCheckPagerDutyServiceResponsePlayNotExist(n string) resource.TestChe func testAccCheckPagerDutyServiceConfig(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id } `, username, email, escalationPolicy, service) } @@ -1342,36 +1330,36 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertGrouping(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title 
= "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping = "time" - alert_grouping_timeout = 1800 + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping = "time" + alert_grouping_timeout = 1800 } `, username, email, escalationPolicy, service) } @@ -1379,41 +1367,41 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertContentGrouping(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "content_based" - config { - aggregate = "all" - fields = ["custom_details.field1"] - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "content_based" + config { + aggregate = "all" + fields = ["custom_details.field1"] } + } } `, username, email, escalationPolicy, service) } @@ -1421,77 +1409,78 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertContentGroupingIntelligentTimeWindow(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = 
"user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "intelligent" - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "intelligent" + } } `, username, email, escalationPolicy, service) } + func testAccCheckPagerDutyServiceConfigWithAlertContentGroupingIntelligentTimeWindowUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "intelligent" - config { - time_window = 900 - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "intelligent" + config { + time_window = 900 } + } } `, username, email, escalationPolicy, service) } @@ -1499,37 +1488,37 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertContentGroupingUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = null - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = null + } } `, 
username, email, escalationPolicy, service) } @@ -1537,40 +1526,40 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertTimeGroupingUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "time" - config { - timeout = 5 - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "time" + config { + timeout = 5 } + } } `, username, email, escalationPolicy, service) } @@ -1578,40 +1567,40 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertTimeGroupingTimeoutZeroUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "time" - config { - timeout = 0 - } - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "time" + config { + timeout = 0 + } + } } `, username, email, escalationPolicy, service) } @@ -1619,36 +1608,36 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertGroupingUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - 
name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping = "intelligent" - alert_grouping_timeout = 1900 + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping = "intelligent" + alert_grouping_timeout = 1900 } `, username, email, escalationPolicy, service) } @@ -1656,80 +1645,80 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertIntelligentGroupingUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } -} - -resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "intelligent" - config { - fields = null - timeout = 0 - } - } -} -`, username, email, escalationPolicy, service) -} - + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} + +resource "pagerduty_service" "foo" { + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "intelligent" + config { + fields = null + timeout = 0 + } + } +} +`, username, email, escalationPolicy, service) +} + func testAccCheckPagerDutyServiceConfigWithAlertIntelligentGroupingDescriptionUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "bar" - auto_resolve_timeout = 1800 - 
acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "intelligent" - config {} - } + name = "%s" + description = "bar" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "intelligent" + config {} + } } `, username, email, escalationPolicy, service) } @@ -1737,37 +1726,37 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertIntelligentGroupingOmittingConfig(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "bar" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = "intelligent" - } + name = "%s" + description = "bar" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = "intelligent" + } } `, username, email, escalationPolicy, service) } @@ -1775,38 +1764,38 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAlertIntelligentGroupingTypeNullEmptyConfigConfig(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "bar" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - alert_grouping_parameters { - type = null - config {} - } + name = "%s" + description = "bar" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + alert_grouping_parameters { + type = null + config {} + } } `, username, email, escalationPolicy, service) } @@ -1814,38 +1803,38 @@ resource "pagerduty_service" "foo" { 
func testAccCheckPagerDutyServiceConfigWithAutoPauseNotificationsParameters(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - auto_pause_notifications_parameters { - enabled = true - timeout = 300 - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + auto_pause_notifications_parameters { + enabled = true + timeout = 300 + } } `, username, email, escalationPolicy, service) } @@ -1853,38 +1842,38 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAutoPauseNotificationsParametersUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" - auto_pause_notifications_parameters { - enabled = false - timeout = null - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" + auto_pause_notifications_parameters { + enabled = false + timeout = null + } } `, username, email, escalationPolicy, service) } @@ -1892,34 +1881,34 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigWithAutoPauseNotificationsParametersRemoved(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - rule { - 
escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - alert_creation = "create_alerts_and_incidents" + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_alerts_and_incidents" } `, username, email, escalationPolicy, service) } @@ -1927,39 +1916,39 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "bar" - auto_resolve_timeout = 3600 - acknowledgement_timeout = 3600 - - escalation_policy = pagerduty_escalation_policy.foo.id - incident_urgency_rule { - type = "constant" - urgency = "high" - } + name = "%s" + description = "bar" + auto_resolve_timeout = 3600 + acknowledgement_timeout = 3600 + + escalation_policy = pagerduty_escalation_policy.foo.id + incident_urgency_rule { + type = "constant" + urgency = "high" + } } `, username, email, escalationPolicy, service) } @@ -1967,39 +1956,39 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceConfigUpdatedWithDisabledTimeouts(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "bar" - auto_resolve_timeout = "null" - acknowledgement_timeout = "null" - - escalation_policy = pagerduty_escalation_policy.foo.id - incident_urgency_rule { - type = "constant" - urgency = "high" - } + name = "%s" + description = "bar" + auto_resolve_timeout = "null" + acknowledgement_timeout = "null" + + escalation_policy = pagerduty_escalation_policy.foo.id + incident_urgency_rule { + type = "constant" + urgency = "high" + } } `, username, email, escalationPolicy, service) } @@ 
-2007,64 +1996,64 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceWithIncidentUrgencyRulesConfig(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - - incident_urgency_rule { - type = "use_support_hours" - - during_support_hours { - type = "constant" - urgency = "high" - } - outside_support_hours { - type = "constant" - urgency = "low" - } - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id - support_hours { - type = "fixed_time_per_day" - time_zone = "America/Lima" - start_time = "09:00:00" - end_time = "17:00:00" - days_of_week = [ 1, 2, 3, 4, 5 ] - } + incident_urgency_rule { + type = "use_support_hours" - scheduled_actions { - type = "urgency_change" - to_urgency = "high" - at { - type = "named_time" - name = "support_hours_start" - } - } + during_support_hours { + type = "constant" + urgency = "high" + } + outside_support_hours { + type = "constant" + urgency = "low" + } + } + + support_hours { + type = "fixed_time_per_day" + time_zone = "America/Lima" + start_time = "09:00:00" + end_time = "17:00:00" + days_of_week = [ 1, 2, 3, 4, 5 ] + } + + scheduled_actions { + type = "urgency_change" + to_urgency = "high" + at { + type = "named_time" + name = "support_hours_start" + } + } } `, username, email, escalationPolicy, service) } @@ -2072,64 +2061,64 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceWithIncidentUrgencyRulesConfigError(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - - incident_urgency_rule { - type = "use_support_hours" - urgency = "high" - during_support_hours { - type = "constant" - urgency = "high" - } - outside_support_hours { - type = "constant" - urgency = "low" - } - } + name = "%s" + description = "foo" + 
auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id - support_hours { - type = "fixed_time_per_day" - time_zone = "America/Lima" - start_time = "09:00:00" - end_time = "17:00:00" - days_of_week = [ 1, 2, 3, 4, 5 ] - } + incident_urgency_rule { + type = "use_support_hours" + urgency = "high" + during_support_hours { + type = "constant" + urgency = "high" + } + outside_support_hours { + type = "constant" + urgency = "low" + } + } - scheduled_actions { - type = "urgency_change" - to_urgency = "high" - at { - type = "named_time" - name = "support_hours_start" - } - } + support_hours { + type = "fixed_time_per_day" + time_zone = "America/Lima" + start_time = "09:00:00" + end_time = "17:00:00" + days_of_week = [ 1, 2, 3, 4, 5 ] + } + + scheduled_actions { + type = "urgency_change" + to_urgency = "high" + at { + type = "named_time" + name = "support_hours_start" + } + } } `, username, email, escalationPolicy, service) } @@ -2137,119 +2126,118 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceWithIncidentUrgencyRulesWithoutScheduledActionsConfig(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - - incident_urgency_rule { - type = "use_support_hours" - - during_support_hours { - type = "constant" - urgency = "high" - } - outside_support_hours { - type = "constant" - urgency = "severity_based" - } - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id - support_hours { - type = "fixed_time_per_day" - time_zone = "America/Lima" - start_time = "09:00:00" - end_time = "17:00:00" - days_of_week = [ 1, 2, 3, 4, 5 ] - } + incident_urgency_rule { + type = "use_support_hours" + during_support_hours { + type = "constant" + urgency = "high" + } + outside_support_hours { + type = "constant" + urgency = "severity_based" + } + } + + support_hours { + type = "fixed_time_per_day" + time_zone = "America/Lima" + start_time = "09:00:00" + end_time = "17:00:00" + days_of_week = [ 1, 2, 3, 4, 5 ] + } } `, username, email, escalationPolicy, service) } func testAccCheckPagerDutyServiceWithIncidentUrgencyRulesConfigUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` - resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - - 
rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "bar bar bar" - auto_resolve_timeout = 3600 - acknowledgement_timeout = 3600 - escalation_policy = pagerduty_escalation_policy.foo.id - - incident_urgency_rule { - type = "use_support_hours" - during_support_hours { - type = "constant" - urgency = "high" - } - outside_support_hours { - type = "constant" - urgency = "low" - } - } + name = "%s" + description = "bar bar bar" + auto_resolve_timeout = 3600 + acknowledgement_timeout = 3600 + escalation_policy = pagerduty_escalation_policy.foo.id - support_hours { - type = "fixed_time_per_day" - time_zone = "America/Lima" - start_time = "09:00:00" - end_time = "17:00:00" - days_of_week = [ 1, 2, 3, 4, 5 ] - } + incident_urgency_rule { + type = "use_support_hours" + during_support_hours { + type = "constant" + urgency = "high" + } + outside_support_hours { + type = "constant" + urgency = "low" + } + } - scheduled_actions { - type = "urgency_change" - to_urgency = "high" - at { - type = "named_time" - name = "support_hours_start" - } - } + support_hours { + type = "fixed_time_per_day" + time_zone = "America/Lima" + start_time = "09:00:00" + end_time = "17:00:00" + days_of_week = [ 1, 2, 3, 4, 5 ] + } + + scheduled_actions { + type = "urgency_change" + to_urgency = "high" + at { + type = "named_time" + name = "support_hours_start" + } + } } `, username, email, escalationPolicy, service) } @@ -2257,40 +2245,39 @@ resource "pagerduty_service" "foo" { func testAccCheckPagerDutyServiceWithSupportHoursConfigUpdated(username, email, escalationPolicy, service string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { - name = "%s" - email = "%s" - color = "green" - role = "user" - job_title = "foo" - description = "foo" + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" } resource "pagerduty_escalation_policy" "foo" { - name = "%s" - description = "bar" - num_loops = 2 - - rule { - escalation_delay_in_minutes = 10 - target { - type = "user_reference" - id = pagerduty_user.foo.id - } - } + name = "%s" + description = "bar" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } } resource "pagerduty_service" "foo" { - name = "%s" - description = "foo" - auto_resolve_timeout = 1800 - acknowledgement_timeout = 1800 - escalation_policy = pagerduty_escalation_policy.foo.id - - incident_urgency_rule { - type = "constant" - urgency = "high" - } + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + incident_urgency_rule { + type = "constant" + urgency = "high" + } } `, username, email, escalationPolicy, service) } diff --git a/util/rangetypes/int64.go b/util/rangetypes/int64.go index c23467fb1..3bebbc192 100644 --- a/util/rangetypes/int64.go +++ b/util/rangetypes/int64.go @@ -81,8 +81,7 @@ func (t Int64Type) Validate(ctx context.Context, in tftypes.Value, path path.Pat diags.AddAttributeError( path, "Invalid Int64 Value", - fmt.Sprintf("A value was provided that is not inside valid range (%v, %v).\n"+ - "Given Value: %v", t.Start, t.End, valueInt64), + 
fmt.Sprintf("Provided a value of '%v' that is not inside valid range [%v, %v].", valueInt64, t.Start, t.End), ) return } diff --git a/util/validate/require_a_if_b_equal.go b/util/validate/require_a_if_b_equal.go index 4ad8d4e95..d815bb494 100644 --- a/util/validate/require_a_if_b_equal.go +++ b/util/validate/require_a_if_b_equal.go @@ -12,16 +12,28 @@ import ( // RequireAIfBEqual checks path `a` is not null when path `b` is equal to `expected`. func RequireAIfBEqual(a, b path.Path, expected attr.Value) resource.ConfigValidator { return &requireIfEqual{ - dst: a, - src: b, - expected: expected, + dst: a, + src: b, + exp: expected, + } +} + +// RequireAIfBEqualWithMessage checks path `a` is not null when path `b` is +// equal to `expected`. Raises error message `msg` when `a` is null. +func RequireAIfBEqualWithMessage(a, b path.Path, expected attr.Value, msg string) resource.ConfigValidator { + return &requireIfEqual{ + dst: a, + src: b, + exp: expected, + msg: msg, } } type requireIfEqual struct { - dst path.Path - src path.Path - expected attr.Value + dst path.Path + src path.Path + exp attr.Value + msg string } func (v *requireIfEqual) Description(ctx context.Context) string { return "" } @@ -38,18 +50,18 @@ func (v *requireIfEqual) ValidateResource(ctx context.Context, req resource.Vali return } - if src.Equal(v.expected) { + if src.Equal(v.exp) { var dst attr.Value resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.dst, &dst)...) if resp.Diagnostics.HasError() { return } if dst.IsNull() { - resp.Diagnostics.AddAttributeError( - v.dst, - fmt.Sprintf("Required %s", v.dst), - fmt.Sprintf("When the value of %s equals %s, field %s must have an explicit value", v.src, v.expected, v.dst), - ) + detail := v.msg + if detail == "" { + detail = fmt.Sprintf("When the value of %s equals %s, field %s must have an explicit value", v.src, v.exp, v.dst) + } + resp.Diagnostics.AddAttributeError(v.dst, fmt.Sprintf("Required %s", v.dst), detail) return } } diff --git a/vendor/github.com/PagerDuty/go-pagerduty/service.go b/vendor/github.com/PagerDuty/go-pagerduty/service.go index 1af589982..d877a63eb 100644 --- a/vendor/github.com/PagerDuty/go-pagerduty/service.go +++ b/vendor/github.com/PagerDuty/go-pagerduty/service.go @@ -88,8 +88,8 @@ type Service struct { Integrations []Integration `json:"integrations,omitempty"` EscalationPolicy EscalationPolicy `json:"escalation_policy,omitempty"` Teams []Team `json:"teams,omitempty"` - IncidentUrgencyRule *IncidentUrgencyRule `json:"incident_urgency_rule,omitempty"` - SupportHours *SupportHours `json:"support_hours,omitempty"` + IncidentUrgencyRule *IncidentUrgencyRule `json:"incident_urgency_rule"` + SupportHours *SupportHours `json:"support_hours"` ScheduledActions []ScheduledAction `json:"scheduled_actions"` AlertCreation string `json:"alert_creation,omitempty"` AlertGrouping string `json:"alert_grouping,omitempty"` From c6b98fceca3f3ffa88840b74ab5b4e3872c11c8e Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Fri, 10 May 2024 06:50:21 -0400 Subject: [PATCH 06/11] Move var to end of file in pagerdutyplugin/service --- pagerdutyplugin/resource_pagerduty_service.go | 84 ++++++++++--------- 1 file changed, 43 insertions(+), 41 deletions(-) diff --git a/pagerdutyplugin/resource_pagerduty_service.go b/pagerdutyplugin/resource_pagerduty_service.go index f5ee15a1d..82e7150b2 100644 --- a/pagerdutyplugin/resource_pagerduty_service.go +++ b/pagerdutyplugin/resource_pagerduty_service.go @@ -762,15 +762,6 @@ func buildSupportHours(ctx context.Context, list types.List, 
diags *diag.Diagnos return supportHours } -var ( - alertGroupingParametersTypeType = enumtypes.StringType{ - OneOf: []string{"time", "intelligent", "content_based"}} - alertGroupingParametersConfigAggregateType = enumtypes.StringType{ - OneOf: []string{"all", "any"}} - autoPauseNotificationsParametersTimeoutType = enumtypes.Int64Type{ - OneOf: []int64{120, 180, 300, 600, 900}} -) - func flattenService(ctx context.Context, service *pagerduty.Service, state resourceServiceModel, diags *diag.Diagnostics) resourceServiceModel { model := resourceServiceModel{ ID: types.StringValue(service.ID), @@ -823,27 +814,12 @@ func flattenService(ctx context.Context, service *pagerduty.Service, state resou } func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.AlertGroupingParameters, diags *diag.Diagnostics) types.List { - alertGroupParamsConfigObjectType := types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "aggregate": alertGroupingParametersConfigAggregateType, - "fields": types.ListType{ElemType: types.StringType}, - "timeout": types.Int64Type, - "time_window": types.Int64Type, - }, - } - alertGroupingParametersObjectType := types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "type": alertGroupingParametersTypeType, - "config": types.ListType{ElemType: alertGroupParamsConfigObjectType}, - }, - } - nullList := types.ListNull(alertGroupingParametersObjectType) if params == nil { return nullList } - configList := types.ListNull(alertGroupParamsConfigObjectType) + configList := types.ListNull(alertGroupingParametersConfigObjectType) log.Printf("[CG] config %#v", params.Config) if params.Config != nil { fieldsList, d := types.ListValueFrom(ctx, types.StringType, params.Config.Fields) @@ -867,7 +843,7 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert aggregate = enumtypes.NewStringValue(params.Config.Aggregate, alertGroupingParametersConfigAggregateType) } - configObj, d := types.ObjectValue(alertGroupParamsConfigObjectType.AttrTypes, map[string]attr.Value{ + configObj, d := types.ObjectValue(alertGroupingParametersConfigObjectType.AttrTypes, map[string]attr.Value{ "aggregate": aggregate, "fields": fieldsList, "timeout": timeout, @@ -877,7 +853,7 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert diags.Append(d...) return nullList } - configList, d = types.ListValue(alertGroupParamsConfigObjectType, []attr.Value{configObj}) + configList, d = types.ListValue(alertGroupingParametersConfigObjectType, []attr.Value{configObj}) if d.HasError() { diags.Append(d...) 
return nullList @@ -941,13 +917,6 @@ func flattenAutoPauseNotificationsParameters(params *pagerduty.AutoPauseNotifica return list } -var incidentUrgencyTypeObjectType = types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "type": types.StringType, - "urgency": types.StringType, - }, -} - func flattenIncidentUrgencyRule(rule *pagerduty.IncidentUrgencyRule, diags *diag.Diagnostics) types.List { incidentUrgencyRuleObjectType := types.ObjectType{ AttrTypes: map[string]attr.Type{ @@ -1006,13 +975,6 @@ func flattenIncidentUrgencyType(urgency *pagerduty.IncidentUrgencyType, diags *d return list } -var scheduledActionAtObjectType = types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "type": types.StringType, - "name": types.StringType, - }, -} - func flattenScheduledActions(actions []pagerduty.ScheduledAction, diags *diag.Diagnostics) types.List { scheduledActionObjectType := types.ObjectType{ AttrTypes: map[string]attr.Type{ @@ -1099,3 +1061,43 @@ func flattenSupportHours(hours *pagerduty.SupportHours, diags *diag.Diagnostics) diags.Append(d...) return list } + +var ( + alertGroupingParametersTypeType = enumtypes.StringType{ + OneOf: []string{"time", "intelligent", "content_based"}} + alertGroupingParametersConfigAggregateType = enumtypes.StringType{ + OneOf: []string{"all", "any"}} + autoPauseNotificationsParametersTimeoutType = enumtypes.Int64Type{ + OneOf: []int64{120, 180, 300, 600, 900}} + + alertGroupingParametersConfigObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "aggregate": alertGroupingParametersConfigAggregateType, + "fields": types.ListType{ElemType: types.StringType}, + "timeout": types.Int64Type, + "time_window": types.Int64Type, + }, + } + alertGroupingParametersObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": alertGroupingParametersTypeType, + "config": types.ListType{ElemType: alertGroupingParametersConfigObjectType}, + }, + } + + alertGroupingParametersPath = path.Root("alert_grouping_parameters").AtListIndex(0) + + incidentUrgencyTypeObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "urgency": types.StringType, + }, + } + + scheduledActionAtObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + "name": types.StringType, + }, + } +) From 3bc9c9e00128a26674c5a7fbcd84fda6defffda7 Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Fri, 10 May 2024 06:50:55 -0400 Subject: [PATCH 07/11] Add useless modifyplan for simulating a computed alert params config --- pagerdutyplugin/resource_pagerduty_service.go | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pagerdutyplugin/resource_pagerduty_service.go b/pagerdutyplugin/resource_pagerduty_service.go index 82e7150b2..64537de28 100644 --- a/pagerdutyplugin/resource_pagerduty_service.go +++ b/pagerdutyplugin/resource_pagerduty_service.go @@ -346,6 +346,22 @@ func (r *resourceService) ValidateConfig(ctx context.Context, req resource.Valid } } +func (r *resourceService) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { + // Set alert_grouping_parameters as Computed then "intelligent" type + var pType types.String + var config types.List + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, alertGroupingParametersPath.AtName("type"), &pType)...) + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, alertGroupingParametersPath.AtName("config"), &config)...) 
+ if resp.Diagnostics.HasError() { + return + } + if pType.ValueString() == "intelligent" && config.IsNull() { + resp.Diagnostics.Append( + resp.Plan.SetAttribute(ctx, alertGroupingParametersPath.AtName("config"), types.ListUnknown(alertGroupingParametersConfigObjectType))..., + ) + } +} + func (r *resourceService) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { var config resourceServiceModel var model resourceServiceModel From 98d0807a26bf3872e844a34221f87d8a2602755c Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Fri, 10 May 2024 06:51:10 -0400 Subject: [PATCH 08/11] Add TimeWindow to vendor go-pagerduty --- vendor/github.com/PagerDuty/go-pagerduty/service.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/vendor/github.com/PagerDuty/go-pagerduty/service.go b/vendor/github.com/PagerDuty/go-pagerduty/service.go index d877a63eb..06ede531c 100644 --- a/vendor/github.com/PagerDuty/go-pagerduty/service.go +++ b/vendor/github.com/PagerDuty/go-pagerduty/service.go @@ -114,9 +114,10 @@ type AlertGroupingParameters struct { // AlertGroupParamsConfig is the config object on alert_grouping_parameters type AlertGroupParamsConfig struct { - Timeout *uint `json:"timeout,omitempty"` - Aggregate string `json:"aggregate,omitempty"` - Fields []string `json:"fields,omitempty"` + Timeout *uint `json:"timeout,omitempty"` + Aggregate string `json:"aggregate,omitempty"` + Fields []string `json:"fields,omitempty"` + TimeWindow *uint `json:"time_window,omitempty"` } // ListServiceOptions is the data structure used when calling the ListServices API endpoint. From 5daccd7fc25d8593124499fe7b328b631432e7f4 Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Fri, 10 May 2024 07:33:03 -0400 Subject: [PATCH 09/11] Add planmodify util package --- pagerdutyplugin/resource_pagerduty_service.go | 2 + .../use_null_for_removed_with_state.go | 37 +++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 util/planmodify/use_null_for_removed_with_state.go diff --git a/pagerdutyplugin/resource_pagerduty_service.go b/pagerdutyplugin/resource_pagerduty_service.go index 64537de28..cea045ff8 100644 --- a/pagerdutyplugin/resource_pagerduty_service.go +++ b/pagerdutyplugin/resource_pagerduty_service.go @@ -10,6 +10,7 @@ import ( "github.com/PagerDuty/go-pagerduty" "github.com/PagerDuty/terraform-provider-pagerduty/util" "github.com/PagerDuty/terraform-provider-pagerduty/util/enumtypes" + "github.com/PagerDuty/terraform-provider-pagerduty/util/planmodify" "github.com/PagerDuty/terraform-provider-pagerduty/util/tztypes" "github.com/PagerDuty/terraform-provider-pagerduty/util/validate" "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" @@ -237,6 +238,7 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest PlanModifiers: []planmodifier.List{ listplanmodifier.RequiresReplace(), listplanmodifier.UseStateForUnknown(), + planmodify.UseNullForRemovedWithState(), }, Validators: []validator.List{ listvalidator.SizeBetween(1, 1), diff --git a/util/planmodify/use_null_for_removed_with_state.go b/util/planmodify/use_null_for_removed_with_state.go new file mode 100644 index 000000000..764f563e1 --- /dev/null +++ b/util/planmodify/use_null_for_removed_with_state.go @@ -0,0 +1,37 @@ +package planmodify + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// UseNullForRemovedWithState sets plan to null if the list has an state, but +// the configuration is 
now null +func UseNullForRemovedWithState() planmodifier.List { + return useNullForRemovedWithStateModifier{} +} + +type useNullForRemovedWithStateModifier struct{} + +func (m useNullForRemovedWithStateModifier) Description(_ context.Context) string { + return "Removes the value if the list has an state, but the configuration changes to null" +} + +func (m useNullForRemovedWithStateModifier) MarkdownDescription(_ context.Context) string { + return "Removes the value if the list has an state, but the configuration changes to null" +} + +func (m useNullForRemovedWithStateModifier) PlanModifyList(_ context.Context, req planmodifier.ListRequest, resp *planmodifier.ListResponse) { + // Do nothing if there is no state value. + if req.StateValue.IsNull() { + return + } + + // Do nothing if there is a known or an unknown configuration value. + if !req.ConfigValue.IsNull() { + return + } + + resp.PlanValue = req.ConfigValue +} From 4099cdb95bacae8fb86943e92b351d37be1f54da Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Fri, 10 May 2024 07:47:36 -0400 Subject: [PATCH 10/11] Fix some tests --- pagerdutyplugin/resource_pagerduty_service.go | 20 ------------------- .../resource_pagerduty_service_test.go | 9 ++++----- 2 files changed, 4 insertions(+), 25 deletions(-) diff --git a/pagerdutyplugin/resource_pagerduty_service.go b/pagerdutyplugin/resource_pagerduty_service.go index cea045ff8..7a36d456d 100644 --- a/pagerdutyplugin/resource_pagerduty_service.go +++ b/pagerdutyplugin/resource_pagerduty_service.go @@ -236,7 +236,6 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest }, }, PlanModifiers: []planmodifier.List{ - listplanmodifier.RequiresReplace(), listplanmodifier.UseStateForUnknown(), planmodify.UseNullForRemovedWithState(), }, @@ -252,7 +251,6 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest listvalidator.SizeBetween(1, 1), }, PlanModifiers: []planmodifier.List{ - listplanmodifier.RequiresReplace(), listplanmodifier.UseStateForUnknown(), }, ElementType: types.ObjectType{ @@ -348,22 +346,6 @@ func (r *resourceService) ValidateConfig(ctx context.Context, req resource.Valid } } -func (r *resourceService) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { - // Set alert_grouping_parameters as Computed then "intelligent" type - var pType types.String - var config types.List - resp.Diagnostics.Append(req.Config.GetAttribute(ctx, alertGroupingParametersPath.AtName("type"), &pType)...) - resp.Diagnostics.Append(req.Config.GetAttribute(ctx, alertGroupingParametersPath.AtName("config"), &config)...) 
- if resp.Diagnostics.HasError() { - return - } - if pType.ValueString() == "intelligent" && config.IsNull() { - resp.Diagnostics.Append( - resp.Plan.SetAttribute(ctx, alertGroupingParametersPath.AtName("config"), types.ListUnknown(alertGroupingParametersConfigObjectType))..., - ) - } -} - func (r *resourceService) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { var config resourceServiceModel var model resourceServiceModel @@ -1103,8 +1085,6 @@ var ( }, } - alertGroupingParametersPath = path.Root("alert_grouping_parameters").AtListIndex(0) - incidentUrgencyTypeObjectType = types.ObjectType{ AttrTypes: map[string]attr.Type{ "type": types.StringType, diff --git a/pagerdutyplugin/resource_pagerduty_service_test.go b/pagerdutyplugin/resource_pagerduty_service_test.go index dbaf8dabc..c60d33c97 100644 --- a/pagerdutyplugin/resource_pagerduty_service_test.go +++ b/pagerdutyplugin/resource_pagerduty_service_test.go @@ -701,8 +701,8 @@ func TestAccPagerDutyService_AutoPauseNotificationsParameters(t *testing.T) { "pagerduty_service.foo", "auto_pause_notifications_parameters.#", "1"), resource.TestCheckResourceAttr( "pagerduty_service.foo", "auto_pause_notifications_parameters.0.enabled", "false"), - resource.TestCheckResourceAttr( - "pagerduty_service.foo", "auto_pause_notifications_parameters.0.timeout", "0"), + resource.TestCheckNoResourceAttr( + "pagerduty_service.foo", "auto_pause_notifications_parameters.0.timeout"), ), }, { @@ -723,8 +723,8 @@ func TestAccPagerDutyService_AutoPauseNotificationsParameters(t *testing.T) { "pagerduty_service.foo", "auto_pause_notifications_parameters.#", "1"), resource.TestCheckResourceAttr( "pagerduty_service.foo", "auto_pause_notifications_parameters.0.enabled", "false"), - resource.TestCheckResourceAttr( - "pagerduty_service.foo", "auto_pause_notifications_parameters.0.timeout", "0"), + resource.TestCheckNoResourceAttr( + "pagerduty_service.foo", "auto_pause_notifications_parameters.0.timeout"), ), }, }, @@ -2027,7 +2027,6 @@ resource "pagerduty_service" "foo" { incident_urgency_rule { type = "use_support_hours" - during_support_hours { type = "constant" urgency = "high" From 734a2af3d67ae9621d66e8ddfd566f60758d1a74 Mon Sep 17 00:00:00 2001 From: Carlos Gajardo Date: Thu, 16 May 2024 10:42:17 -0400 Subject: [PATCH 11/11] wip --- pagerdutyplugin/resource_pagerduty_service.go | 38 +++++--------- util/optiontypes/int64.go | 44 ++++++++++++++++ util/optiontypes/list.go | 52 +++++++++++++++++++ 3 files changed, 108 insertions(+), 26 deletions(-) create mode 100644 util/optiontypes/int64.go create mode 100644 util/optiontypes/list.go diff --git a/pagerdutyplugin/resource_pagerduty_service.go b/pagerdutyplugin/resource_pagerduty_service.go index 7a36d456d..e11dc5a4f 100644 --- a/pagerdutyplugin/resource_pagerduty_service.go +++ b/pagerdutyplugin/resource_pagerduty_service.go @@ -10,6 +10,7 @@ import ( "github.com/PagerDuty/go-pagerduty" "github.com/PagerDuty/terraform-provider-pagerduty/util" "github.com/PagerDuty/terraform-provider-pagerduty/util/enumtypes" + "github.com/PagerDuty/terraform-provider-pagerduty/util/optiontypes" "github.com/PagerDuty/terraform-provider-pagerduty/util/planmodify" "github.com/PagerDuty/terraform-provider-pagerduty/util/tztypes" "github.com/PagerDuty/terraform-provider-pagerduty/util/validate" @@ -152,21 +153,7 @@ func (r *resourceService) Schema(ctx context.Context, req resource.SchemaRequest listvalidator.ConflictsWith(path.MatchRoot("alert_grouping")), 
listvalidator.ConflictsWith(path.MatchRoot("alert_grouping_timeout")), }, - ElementType: types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "type": alertGroupingParametersTypeType, - "config": types.ListType{ - ElemType: types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "timeout": types.Int64Type, - "fields": types.ListType{ElemType: types.StringType}, - "aggregate": alertGroupingParametersConfigAggregateType, - "time_window": types.Int64Type, - }, - }, - }, - }, - }, + ElementType: alertGroupingParametersObjectType, PlanModifiers: []planmodifier.List{listplanmodifier.UseStateForUnknown()}, }, @@ -382,9 +369,6 @@ func (r *resourceService) Create(ctx context.Context, req resource.CreateRequest return retry.RetryableError(err) } model = flattenService(ctx, serviceResponse, config, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return retry.NonRetryableError(fmt.Errorf("%#v", resp.Diagnostics)) - } return nil }) if err != nil { @@ -819,8 +803,7 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert return nullList } - configList := types.ListNull(alertGroupingParametersConfigObjectType) - log.Printf("[CG] config %#v", params.Config) + configList := optiontypes.NewListNull(alertGroupingParametersConfigObjectType) if params.Config != nil { fieldsList, d := types.ListValueFrom(ctx, types.StringType, params.Config.Fields) if d.HasError() { @@ -833,9 +816,9 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert timeout = types.Int64Value(int64(*params.Config.Timeout)) } - timeWindow := types.Int64Null() + timeWindow := optiontypes.NewInt64Null() if params.Config.TimeWindow != nil { - timeWindow = types.Int64Value(int64(*params.Config.TimeWindow)) + timeWindow = optiontypes.NewInt64Value(int64(*params.Config.TimeWindow)) } aggregate := enumtypes.NewStringNull(alertGroupingParametersConfigAggregateType) @@ -853,7 +836,8 @@ func flattenAlertGroupingParameters(ctx context.Context, params *pagerduty.Alert diags.Append(d...) return nullList } - configList, d = types.ListValue(alertGroupingParametersConfigObjectType, []attr.Value{configObj}) + + configList, d = optiontypes.NewListValue(alertGroupingParametersConfigObjectType, []attr.Value{configObj}) if d.HasError() { diags.Append(d...) 
return nullList @@ -1075,13 +1059,15 @@ var ( "aggregate": alertGroupingParametersConfigAggregateType, "fields": types.ListType{ElemType: types.StringType}, "timeout": types.Int64Type, - "time_window": types.Int64Type, + "time_window": optiontypes.Int64Type{}, }, } alertGroupingParametersObjectType = types.ObjectType{ AttrTypes: map[string]attr.Type{ - "type": alertGroupingParametersTypeType, - "config": types.ListType{ElemType: alertGroupingParametersConfigObjectType}, + "type": alertGroupingParametersTypeType, + "config": optiontypes.ListType{ + ListType: types.ListType{ElemType: alertGroupingParametersConfigObjectType}, + }, }, } diff --git a/util/optiontypes/int64.go b/util/optiontypes/int64.go new file mode 100644 index 000000000..a67d6c86e --- /dev/null +++ b/util/optiontypes/int64.go @@ -0,0 +1,44 @@ +package optiontypes + +import ( + "context" + "log" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +type Int64Value struct { + basetypes.Int64Value +} + +func NewInt64Value(i int64) attr.Value { return Int64Value{basetypes.NewInt64Value(i)} } +func NewInt64Null() attr.Value { return Int64Value{basetypes.NewInt64Null()} } +func NewInt64Unknown() attr.Value { return Int64Value{basetypes.NewInt64Unknown()} } + +func (i Int64Value) Type(ctx context.Context) attr.Type { return Int64Type{} } + +func (i Int64Value) Equal(o attr.Value) bool { + log.Printf("[cg] o=%#v i=%#v", o, i) + if i.IsNull() || o.IsNull() { + return true + } + if o2, ok := o.(Int64Value); ok { + o = o2.Int64Value + } + return i.Int64Value.Equal(o) +} + +type Int64Type struct { + basetypes.Int64Type +} + +func (t Int64Type) String() string { return "optiontypes.Int64Type" } + +func (t Int64Type) Equal(o attr.Type) bool { + if _, ok := o.(Int64Type); ok { + return true + } + _, ok := o.(basetypes.Int64Type) + return ok +} diff --git a/util/optiontypes/list.go b/util/optiontypes/list.go new file mode 100644 index 000000000..7411edf4d --- /dev/null +++ b/util/optiontypes/list.go @@ -0,0 +1,52 @@ +package optiontypes + +import ( + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +type ListValue struct { + basetypes.ListValue +} + +func (l ListValue) Equal(o attr.Value) bool { + other, ok := o.(ListValue) + if !ok { + if l.IsNull() || other.IsNull() { + return true + } + o = other.ListValue + } + + return l.ListValue.Equal(o) +} + +func NewListNull(t attr.Type) ListValue { + return ListValue{basetypes.NewListNull(t)} +} + +func NewListUnknown(t attr.Type) ListValue { + return ListValue{basetypes.NewListUnknown(t)} +} + +func NewListValue(t attr.Type, elements []attr.Value) (ListValue, diag.Diagnostics) { + l, diags := basetypes.NewListValue(t, elements) + return ListValue{l}, diags +} + +type ListType struct { + basetypes.ListType +} + +func (l ListType) Equal(o attr.Type) bool { + if _, ok := o.(ListType); ok { + return true + } + _, ok := o.(basetypes.ListType) + return ok +} + +func (l ListType) String() string { + return "optiontypes.ListType[" + l.ElementType().String() + "]" +}
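
Note: the RequireAIfBEqualWithMessage helper added to util/validate earlier in this series is only defined in the diffs above, not wired up anywhere shown here. The sketch below is a hypothetical call site, not code from the series: the resource type, attribute paths, and error message are invented, and it assumes a plain string "type" attribute so that a types.StringValue compares equal to the configured value.

package pagerdutyplugin

import (
	"context"

	"github.com/PagerDuty/terraform-provider-pagerduty/util/validate"
	"github.com/hashicorp/terraform-plugin-framework/path"
	"github.com/hashicorp/terraform-plugin-framework/resource"
	"github.com/hashicorp/terraform-plugin-framework/types"
)

// resourceExample is a stand-in type; only the ConfigValidators hook matters here.
type resourceExample struct{}

func (r *resourceExample) ConfigValidators(_ context.Context) []resource.ConfigValidator {
	return []resource.ConfigValidator{
		// Require "config" whenever "type" equals "content_based", and surface a
		// custom message instead of the validator's generated default.
		validate.RequireAIfBEqualWithMessage(
			path.Root("config"),
			path.Root("type"),
			types.StringValue("content_based"),
			`"config" must be set when "type" is "content_based"`,
		),
	}
}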
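
Note: PATCH 08 adds TimeWindow to AlertGroupParamsConfig in the vendored go-pagerduty client. A quick sketch of what that buys, assuming the struct exactly as patched: a content-based grouping config now marshals its time_window, while the still-nil Timeout is dropped by omitempty. The field values are illustrative only.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/PagerDuty/go-pagerduty"
)

func main() {
	window := uint(300)
	cfg := pagerduty.AlertGroupParamsConfig{
		Aggregate:  "all",
		Fields:     []string{"custom_details.source_id"},
		TimeWindow: &window, // new field from PATCH 08
	}

	out, err := json.Marshal(cfg)
	if err != nil {
		panic(err)
	}
	// Prints: {"aggregate":"all","fields":["custom_details.source_id"],"time_window":300}
	fmt.Println(string(out))
}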
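
Note: the UseNullForRemovedWithState modifier from PATCH 09 only acts when prior state holds a value and the configuration no longer does. Below is a minimal test-style sketch of that branch, assuming the package as committed; the test file and name are hypothetical, and only the request fields the modifier actually reads are populated.

package planmodify_test

import (
	"context"
	"testing"

	"github.com/PagerDuty/terraform-provider-pagerduty/util/planmodify"
	"github.com/hashicorp/terraform-plugin-framework/attr"
	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
	"github.com/hashicorp/terraform-plugin-framework/types"
)

func TestUseNullForRemovedWithState_RemovedBlock(t *testing.T) {
	elem := types.StringType

	req := planmodifier.ListRequest{
		// The block was applied previously, so state still holds a value...
		StateValue: types.ListValueMust(elem, []attr.Value{types.StringValue("old")}),
		// ...but it has been removed from the configuration.
		ConfigValue: types.ListNull(elem),
	}
	// Simulate the framework seeding the response with an unknown planned value.
	resp := &planmodifier.ListResponse{PlanValue: types.ListUnknown(elem)}

	planmodify.UseNullForRemovedWithState().PlanModifyList(context.Background(), req, resp)

	if !resp.PlanValue.IsNull() {
		t.Fatalf("expected a null planned value, got %v", resp.PlanValue)
	}
}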
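
Note: the optiontypes values introduced in the final wip commit make a null value compare equal to any other value, which appears to be how the provider intends to keep server-computed attributes such as time_window from producing perpetual diffs. A small sketch of that equality behaviour, assuming the package exactly as committed (its Equal method still emits a debug log line):

package main

import (
	"fmt"

	"github.com/PagerDuty/terraform-provider-pagerduty/util/optiontypes"
)

func main() {
	// The attribute was omitted from the configuration...
	configured := optiontypes.NewInt64Null()
	// ...but PagerDuty filled a value in server-side.
	computed := optiontypes.NewInt64Value(300)

	// With the option type, null compares equal to any concrete value in either
	// direction, so the omitted attribute does not register as a difference.
	fmt.Println(configured.Equal(computed)) // true
	fmt.Println(computed.Equal(configured)) // true
}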