diff --git a/github/repository_rules_validation_utils.go b/github/repository_rules_validation_utils.go new file mode 100644 index 0000000000..a475e4dd28 --- /dev/null +++ b/github/repository_rules_validation_utils.go @@ -0,0 +1,157 @@ +package github + +import ( + "context" + "fmt" + "slices" + + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// branchTagOnlyRules contains rules that are only valid for branch and tag targets. +// +// These rules apply to ref-based operations (branches and tags) and are not supported +// for push rulesets which operate on file content. +// +// To verify/maintain this list: +// 1. Check the GitHub API documentation for organization rulesets: +// https://docs.github.com/en/rest/orgs/rules?apiVersion=2022-11-28#create-an-organization-repository-ruleset +// 2. The API docs don't clearly separate push vs branch/tag rules. To verify, +// attempt to create a push ruleset via API or UI with each rule type. +// Push rulesets will reject branch/tag rules with "Invalid rule ''" error. +// 3. Generally, push rules deal with file content (paths, sizes, extensions), +// while branch/tag rules deal with ref lifecycle and merge requirements. +var branchTagOnlyRules = []string{ + "creation", + "update", + "deletion", + "required_linear_history", + "required_signatures", + "pull_request", + "required_status_checks", + "non_fast_forward", + "commit_message_pattern", + "commit_author_email_pattern", + "committer_email_pattern", + "branch_name_pattern", + "tag_name_pattern", + "required_workflows", + "required_code_scanning", + "required_deployments", + "merge_queue", +} + +// pushOnlyRules contains rules that are only valid for push targets. +// +// These rules apply to push operations and control what content can be pushed +// to repositories. They are not supported for branch or tag rulesets. +// +// To verify/maintain this list: +// 1. Check the GitHub API documentation for organization rulesets: +// https://docs.github.com/en/rest/orgs/rules?apiVersion=2022-11-28#create-an-organization-repository-ruleset +// 2. The API docs don't clearly separate push vs branch/tag rules. To verify, +// attempt to create a branch ruleset via API or UI with each rule type. +// Branch rulesets will reject push-only rules with an error. +// 3. Push rules control file content: paths, sizes, extensions, path lengths. 
+var pushOnlyRules = []string{ + "file_path_restriction", + "max_file_path_length", + "file_extension_restriction", + "max_file_size", +} + +func validateRulesForTarget(ctx context.Context, d *schema.ResourceDiff) error { + target := d.Get("target").(string) + tflog.Debug(ctx, "Validating rules for target", map[string]any{"target": target}) + + switch target { + case "push": + return validateRulesForPushTarget(ctx, d) + case "branch", "tag": + return validateRulesForBranchTagTarget(ctx, d) + } + + tflog.Debug(ctx, "Rules validation passed", map[string]any{"target": target}) + return nil +} + +func validateRulesForPushTarget(ctx context.Context, d *schema.ResourceDiff) error { + return validateRules(ctx, d, pushOnlyRules) +} + +func validateRulesForBranchTagTarget(ctx context.Context, d *schema.ResourceDiff) error { + return validateRules(ctx, d, branchTagOnlyRules) +} + +func validateRules(ctx context.Context, d *schema.ResourceDiff, allowedRules []string) error { + target := d.Get("target").(string) + rules := d.Get("rules").([]any)[0].(map[string]any) + for ruleName := range rules { + ruleValue, exists := d.GetOk(fmt.Sprintf("rules.0.%s", ruleName)) + if !exists { + continue + } + switch ruleValue := ruleValue.(type) { + case []any: + if len(ruleValue) == 0 { + continue + } + case map[string]any: + if len(ruleValue) == 0 { + continue + } + case any: + if ruleValue == nil { + continue + } + } + if slices.Contains(allowedRules, ruleName) { + continue + } else { + tflog.Debug(ctx, fmt.Sprintf("Invalid rule for %s target", target), map[string]any{"rule": ruleName, "value": ruleValue}) + return fmt.Errorf("rule %q is not valid for %[2]s target; %[2]s targets only support: %v", ruleName, target, allowedRules) + } + } + tflog.Debug(ctx, fmt.Sprintf("Rules validation passed for %s target", target)) + return nil +} + +func validateRepositoryRulesetConditionsFieldForBranchAndTagTargets(ctx context.Context, target string, conditions map[string]any) error { + tflog.Debug(ctx, fmt.Sprintf("Validating conditions field for %s target", target), map[string]any{"target": target, "conditions": conditions}) + + if conditions["ref_name"] == nil || len(conditions["ref_name"].([]any)) == 0 { + tflog.Debug(ctx, fmt.Sprintf("Missing ref_name for %s target", target), map[string]any{"target": target}) + return fmt.Errorf("ref_name must be set for %s target", target) + } + + tflog.Debug(ctx, fmt.Sprintf("Conditions validation passed for %s target", target)) + return nil +} + +func validateConditionsFieldForBranchAndTagTargets(ctx context.Context, target string, conditions map[string]any) error { + tflog.Debug(ctx, fmt.Sprintf("Validating conditions field for %s target", target), map[string]any{"target": target, "conditions": conditions}) + + if conditions["ref_name"] == nil || len(conditions["ref_name"].([]any)) == 0 { + tflog.Debug(ctx, fmt.Sprintf("Missing ref_name for %s target", target), map[string]any{"target": target}) + return fmt.Errorf("ref_name must be set for %s target", target) + } + + if (conditions["repository_name"] == nil || len(conditions["repository_name"].([]any)) == 0) && (conditions["repository_id"] == nil || len(conditions["repository_id"].([]any)) == 0) { + tflog.Debug(ctx, fmt.Sprintf("Missing repository_name or repository_id for %s target", target), map[string]any{"target": target}) + return fmt.Errorf("either repository_name or repository_id must be set for %s target", target) + } + tflog.Debug(ctx, fmt.Sprintf("Conditions validation passed for %s target", target)) + return nil +} + +func 
validateConditionsFieldForPushTarget(ctx context.Context, conditions map[string]any) error { + tflog.Debug(ctx, "Validating conditions field for push target", map[string]any{"target": "push", "conditions": conditions}) + + if conditions["ref_name"] != nil && len(conditions["ref_name"].([]any)) > 0 { + tflog.Debug(ctx, "Invalid ref_name for push target", map[string]any{"ref_name": conditions["ref_name"]}) + return fmt.Errorf("ref_name must not be set for push target") + } + tflog.Debug(ctx, "Conditions validation passed for push target") + return nil +} diff --git a/github/repository_rules_validation_utils_test.go b/github/repository_rules_validation_utils_test.go new file mode 100644 index 0000000000..91915561fb --- /dev/null +++ b/github/repository_rules_validation_utils_test.go @@ -0,0 +1,225 @@ +package github + +import ( + "testing" +) + +func TestValidateConditionsFieldForPushTarget(t *testing.T) { + tests := []struct { + name string + conditions map[string]any + expectError bool + errorMsg string + }{ + { + name: "valid push target without ref_name", + conditions: map[string]any{ + "repository_name": []any{map[string]any{"include": []any{"~ALL"}, "exclude": []any{}}}, + }, + expectError: false, + }, + { + name: "valid push target with nil ref_name", + conditions: map[string]any{"ref_name": nil}, + expectError: false, + }, + { + name: "valid push target with empty ref_name slice", + conditions: map[string]any{"ref_name": []any{}}, + expectError: false, + }, + { + name: "invalid push target with ref_name set", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"~ALL"}, "exclude": []any{}}}, + }, + expectError: true, + errorMsg: "ref_name must not be set for push target", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateConditionsFieldForPushTarget(t.Context(), tt.conditions) + if tt.expectError { + if err == nil { + t.Errorf("expected error but got nil") + } else if err.Error() != tt.errorMsg { + t.Errorf("expected error %q, got %q", tt.errorMsg, err.Error()) + } + } else { + if err != nil { + t.Errorf("expected no error but got: %v", err) + } + } + }) + } +} + +func TestValidateRepositoryRulesetConditionsFieldForBranchAndTagTargets(t *testing.T) { + tests := []struct { + name string + target string + conditions map[string]any + expectError bool + errorMsg string + }{ + { + name: "valid branch target with ref_name", + target: "branch", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"~DEFAULT_BRANCH"}, "exclude": []any{}}}, + }, + expectError: false, + }, + { + name: "valid tag target with ref_name", + target: "tag", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"v*"}, "exclude": []any{}}}, + }, + expectError: false, + }, + { + name: "invalid branch target without ref_name", + target: "branch", + conditions: map[string]any{}, + expectError: true, + errorMsg: "ref_name must be set for branch target", + }, + { + name: "invalid tag target without ref_name", + target: "tag", + conditions: map[string]any{}, + expectError: true, + errorMsg: "ref_name must be set for tag target", + }, + { + name: "invalid branch target with nil ref_name", + target: "branch", + conditions: map[string]any{"ref_name": nil}, + expectError: true, + errorMsg: "ref_name must be set for branch target", + }, + { + name: "invalid tag target with empty ref_name slice", + target: "tag", + conditions: map[string]any{"ref_name": []any{}}, + expectError: true, + errorMsg: 
"ref_name must be set for tag target", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateRepositoryRulesetConditionsFieldForBranchAndTagTargets(t.Context(), tt.target, tt.conditions) + if tt.expectError { + if err == nil { + t.Errorf("expected error but got nil") + } else if err.Error() != tt.errorMsg { + t.Errorf("expected error %q, got %q", tt.errorMsg, err.Error()) + } + } else { + if err != nil { + t.Errorf("expected no error but got: %v", err) + } + } + }) + } +} + +func TestValidateConditionsFieldForBranchAndTagTargets(t *testing.T) { + tests := []struct { + name string + target string + conditions map[string]any + expectError bool + errorMsg string + }{ + { + name: "valid branch target with ref_name and repository_name", + target: "branch", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"~DEFAULT_BRANCH"}, "exclude": []any{}}}, + "repository_name": []any{map[string]any{"include": []any{"~ALL"}, "exclude": []any{}}}, + }, + expectError: false, + }, + { + name: "valid tag target with ref_name and repository_id", + target: "tag", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"v*"}, "exclude": []any{}}}, + "repository_id": []any{123, 456}, + }, + expectError: false, + }, + { + name: "invalid branch target without ref_name", + target: "branch", + conditions: map[string]any{ + "repository_name": []any{map[string]any{"include": []any{"~ALL"}, "exclude": []any{}}}, + }, + expectError: true, + errorMsg: "ref_name must be set for branch target", + }, + { + name: "invalid branch target without repository_name or repository_id", + target: "branch", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"~DEFAULT_BRANCH"}, "exclude": []any{}}}, + }, + expectError: true, + errorMsg: "either repository_name or repository_id must be set for branch target", + }, + { + name: "invalid tag target with nil repository_name and repository_id", + target: "tag", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"v*"}, "exclude": []any{}}}, + "repository_name": nil, + "repository_id": nil, + }, + expectError: true, + errorMsg: "either repository_name or repository_id must be set for tag target", + }, + { + name: "invalid branch target with empty repository_name and repository_id slices", + target: "branch", + conditions: map[string]any{ + "ref_name": []any{map[string]any{"include": []any{"~DEFAULT_BRANCH"}, "exclude": []any{}}}, + "repository_name": []any{}, + "repository_id": []any{}, + }, + expectError: true, + errorMsg: "either repository_name or repository_id must be set for branch target", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateConditionsFieldForBranchAndTagTargets(t.Context(), tt.target, tt.conditions) + if tt.expectError { + if err == nil { + t.Errorf("expected error but got nil") + } else if err.Error() != tt.errorMsg { + t.Errorf("expected error %q, got %q", tt.errorMsg, err.Error()) + } + } else { + if err != nil { + t.Errorf("expected no error but got: %v", err) + } + } + }) + } +} + +func TestRuleListsDoNotOverlap(t *testing.T) { + for _, pushRule := range pushOnlyRules { + for _, branchTagRule := range branchTagOnlyRules { + if pushRule == branchTagRule { + t.Errorf("rule %q appears in both pushOnlyRules and branchTagOnlyRules", pushRule) + } + } + } +} diff --git a/github/resource_github_organization_ruleset.go b/github/resource_github_organization_ruleset.go index 
eea93ffc0a..bd9a9eb31e 100644 --- a/github/resource_github_organization_ruleset.go +++ b/github/resource_github_organization_ruleset.go @@ -5,11 +5,13 @@ import ( "errors" "fmt" "net/http" + "regexp" "strconv" "github.com/google/go-github/v81/github" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) @@ -26,6 +28,11 @@ func resourceGithubOrganizationRuleset() *schema.Resource { SchemaVersion: 1, + CustomizeDiff: customdiff.All( + validateConditionsFieldBasedOnTarget, + validateOrganizationRulesetRules, + ), + Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, @@ -34,16 +41,17 @@ func resourceGithubOrganizationRuleset() *schema.Resource { Description: "The name of the ruleset.", }, "target": { - Type: schema.TypeString, - Required: true, + Type: schema.TypeString, + Required: true, + // The API accepts an `repository` target, but any rule created with that doesn't show up in the UI, nor does it have any rules. ValidateFunc: validation.StringInSlice([]string{"branch", "tag", "push"}, false), - Description: "Possible values are `branch`, `tag` and `push`. Note: The `push` target is in beta and is subject to change.", + Description: "The target of the ruleset. Possible values are `branch`, `tag` and `push`.", }, "enforcement": { Type: schema.TypeString, Required: true, ValidateFunc: validation.StringInSlice([]string{"disabled", "active", "evaluate"}, false), - Description: "Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`.", + Description: "The enforcement level of the ruleset. `evaluate` allows admins to test rules before enforcing them. Possible values are `disabled`, `active`, and `evaluate`. Note: `evaluate` is only available for Enterprise plans.", }, "bypass_actors": { Type: schema.TypeList, // TODO: These are returned from GH API sorted by actor_id, we might want to investigate if we want to include sorting @@ -62,7 +70,7 @@ func resourceGithubOrganizationRuleset() *schema.Resource { Type: schema.TypeString, Required: true, ValidateFunc: validation.StringInSlice([]string{"Integration", "OrganizationAdmin", "RepositoryRole", "Team", "DeployKey"}, false), - Description: "The type of actor that can bypass a ruleset. See https://docs.github.com/en/rest/orgs/rules for more information", + Description: "The type of actor that can bypass a ruleset. Can be one of: `Integration`, `OrganizationAdmin`, `RepositoryRole`, `Team`, or `DeployKey`.", }, "bypass_mode": { Type: schema.TypeString, @@ -87,13 +95,14 @@ func resourceGithubOrganizationRuleset() *schema.Resource { Type: schema.TypeList, Optional: true, MaxItems: 1, - Description: "Parameters for an organization ruleset condition. `ref_name` is required alongside one of `repository_name` or `repository_id`.", + Description: "Parameters for an organization ruleset condition. `ref_name` is required for `branch` and `tag` targets, but must not be set for `push` targets. One of `repository_name` or `repository_id` is always required.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "ref_name": { - Type: schema.TypeList, - Required: true, - MaxItems: 1, + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Targets refs that match the specified patterns. 
Required for `branch` and `tag` targets.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "include": { @@ -119,6 +128,7 @@ func resourceGithubOrganizationRuleset() *schema.Resource { Type: schema.TypeList, Optional: true, MaxItems: 1, + Description: "Targets repositories that match the specified name patterns.", ExactlyOneOf: []string{"conditions.0.repository_id"}, AtLeastOneOf: []string{"conditions.0.repository_id"}, Elem: &schema.Resource{ @@ -238,6 +248,7 @@ func resourceGithubOrganizationRuleset() *schema.Resource { Default: false, Description: "All conversations on code must be resolved before a pull request can be merged. Defaults to `false`.", }, + "required_reviewers": requiredReviewersSchema(), }, }, }, @@ -256,9 +267,10 @@ func resourceGithubOrganizationRuleset() *schema.Resource { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "context": { - Type: schema.TypeString, - Required: true, - Description: "The status check context name that must be present on the commit.", + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringIsNotEmpty), + Description: "The status check context name that must be present on the commit.", }, "integration_id": { Type: schema.TypeInt, @@ -286,7 +298,7 @@ func resourceGithubOrganizationRuleset() *schema.Resource { "non_fast_forward": { Type: schema.TypeBool, Optional: true, - Description: "Prevent users with push access from force pushing to branches.", + Description: "Prevent users with push access from force pushing to refs.", }, "commit_message_pattern": { Type: schema.TypeList, @@ -465,9 +477,10 @@ func resourceGithubOrganizationRuleset() *schema.Resource { Description: "The repository in which the workflow is defined.", }, "path": { - Type: schema.TypeString, - Required: true, - Description: "The path to the workflow YAML definition file.", + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: toDiagFunc(validation.StringMatch(regexp.MustCompile(`^\.github\/workflows\/.*$`), "Path must be in the .github/workflows directory"), "path"), + Description: "The path to the workflow YAML definition file.", }, "ref": { Type: schema.TypeString, @@ -589,8 +602,9 @@ func resourceGithubOrganizationRuleset() *schema.Resource { }, }, "etag": { - Type: schema.TypeString, - Computed: true, + Type: schema.TypeString, + Computed: true, + Description: "An etag representing the ruleset for caching purposes.", }, }, } @@ -688,7 +702,7 @@ func resourceGithubOrganizationRulesetRead(ctx context.Context, d *schema.Resour _ = d.Set("target", ruleset.GetTarget()) _ = d.Set("enforcement", ruleset.Enforcement) _ = d.Set("bypass_actors", flattenBypassActors(ruleset.BypassActors)) - _ = d.Set("conditions", flattenConditions(ruleset.GetConditions(), true)) + _ = d.Set("conditions", flattenConditionsWithContext(ctx, ruleset.GetConditions(), true)) _ = d.Set("rules", flattenRules(ruleset.Rules, true)) _ = d.Set("node_id", ruleset.GetNodeID()) _ = d.Set("etag", resp.Header.Get("ETag")) @@ -831,3 +845,37 @@ func resourceGithubOrganizationRulesetImport(ctx context.Context, d *schema.Reso return []*schema.ResourceData{d}, nil } + +func validateConditionsFieldBasedOnTarget(ctx context.Context, d *schema.ResourceDiff, meta any) error { + target := d.Get("target").(string) + tflog.Debug(ctx, "Validating conditions field based on target", map[string]any{"target": target}) + conditionsRaw := d.Get("conditions").([]any) + + if len(conditionsRaw) == 0 { + tflog.Debug(ctx, "An empty conditions block, skipping 
validation.", map[string]any{"target": target}) + return nil + } + + conditions := conditionsRaw[0].(map[string]any) + + switch target { + case "branch", "tag": + return validateConditionsFieldForBranchAndTagTargets(ctx, target, conditions) + case "push": + return validateConditionsFieldForPushTarget(ctx, conditions) + } + return nil +} + +func validateOrganizationRulesetRules(ctx context.Context, d *schema.ResourceDiff, _ any) error { + target := d.Get("target").(string) + tflog.Debug(ctx, "Validating organization ruleset rules based on target", map[string]any{"target": target}) + + rulesRaw := d.Get("rules").([]any) + if len(rulesRaw) == 0 { + tflog.Debug(ctx, "No rules block, skipping validation") + return nil + } + + return validateRulesForTarget(ctx, d) +} diff --git a/github/resource_github_organization_ruleset_test.go b/github/resource_github_organization_ruleset_test.go index 774315e6db..89f1e3fed8 100644 --- a/github/resource_github_organization_ruleset_test.go +++ b/github/resource_github_organization_ruleset_test.go @@ -2,6 +2,7 @@ package github import ( "fmt" + "regexp" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" @@ -35,10 +36,10 @@ name: Echo Workflow on: [pull_request] jobs: - echo: - runs-on: linux - steps: - - run: echo \"Hello, world!\" + echo: + runs-on: linux + steps: + - run: echo \"Hello, world!\" EOT commit_message = "Managed by Terraform" commit_author = "Terraform User" @@ -204,11 +205,6 @@ resource "github_organization_ruleset" "test" { include = ["~ALL"] exclude = [] } - - ref_name { - include = ["~ALL"] - exclude = [] - } } rules { @@ -493,6 +489,333 @@ resource "github_organization_ruleset" "test" { }, }) }) + + t.Run("validates_branch_target_requires_ref_name_condition", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + config := fmt.Sprintf(` + resource "github_organization_ruleset" "test" { + name = "test-validation-%s" + target = "branch" + enforcement = "active" + + conditions { + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } + } + `, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("ref_name must be set for branch target"), + }, + }, + }) + }) + + t.Run("validates_tag_target_requires_ref_name_condition", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + config := fmt.Sprintf(` + resource "github_organization_ruleset" "test" { + name = "test-tag-no-conditions-%s" + target = "tag" + enforcement = "active" + + conditions { + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } + } + `, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("ref_name must be set for tag target"), + }, + }, + }) + }) + + t.Run("validates_push_target_rejects_ref_name_condition", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + resourceName := "test-push-reject-ref-name" + config := fmt.Sprintf(` + resource "github_organization_ruleset" "%s" { + name = "test-push-with-ref-%s" + target = "push" + enforcement = "active" + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } 
+ repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + # Push rulesets only support push-specific rules + max_file_size { + max_file_size = 100 + } + } + } + `, resourceName, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("ref_name must not be set for push target"), + }, + }, + }) + }) + + t.Run("validates_push_target_rejects_branch_or_tag_rules", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + resourceName := "test-push-reject-branch-rules" + config := fmt.Sprintf(` + resource "github_organization_ruleset" "%s" { + name = "test-push-branch-rule-%s" + target = "push" + enforcement = "active" + + conditions { + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + # 'creation' is a branch/tag rule, not valid for push target + creation = true + } + } + `, resourceName, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("rule .* is not valid for push target"), + }, + }, + }) + }) + + t.Run("validates_branch_target_rejects_push-only_rules", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + resourceName := "test-branch-reject-push-rules" + config := fmt.Sprintf(` + resource "github_organization_ruleset" "%s" { + name = "test-branch-push-rule-%s" + target = "branch" + enforcement = "active" + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + # 'max_file_size' is a push-only rule, not valid for branch target + max_file_size { + max_file_size = 100 + } + } + } + `, resourceName, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("rule .* is not valid for branch target"), + }, + }, + }) + }) + + t.Run("creates_push_ruleset", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + resourceName := "test-push-ruleset" + config := fmt.Sprintf(` + resource "github_organization_ruleset" "%s" { + name = "test-push-%s" + target = "push" + enforcement = "active" + + conditions { + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + # Push rulesets only support push-specific rules: + # file_path_restriction, max_file_path_length, file_extension_restriction, max_file_size + max_file_size { + max_file_size = 100 + } + } + } + `, resourceName, randomID) + + check := resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + fmt.Sprintf("github_organization_ruleset.%s", resourceName), + "name", + fmt.Sprintf("test-push-%s", randomID), + ), + resource.TestCheckResourceAttr( + fmt.Sprintf("github_organization_ruleset.%s", resourceName), + "target", + "push", + ), + resource.TestCheckResourceAttr( + fmt.Sprintf("github_organization_ruleset.%s", resourceName), + "enforcement", + "active", + ), + resource.TestCheckResourceAttr( + fmt.Sprintf("github_organization_ruleset.%s", resourceName), + "rules.0.max_file_size.0.max_file_size", + "100", + ), + ) + + resource.Test(t, resource.TestCase{ + PreCheck: 
func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + Check: check, + }, + }, + }) + }) + + t.Run("validates_rules__required_status_checks_block", func(t *testing.T) { + t.Run("required_check__context_block_should_not_be_empty", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + resourceName := "test-required-status-checks-context-is-not-empty" + config := fmt.Sprintf(` + resource "github_organization_ruleset" "%s" { + name = "test-context-is-not-empty-%s" + target = "branch" + enforcement = "active" + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + required_status_checks { + required_check { + context = "" + } + } + } + } + `, resourceName, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("expected \"context\" to not be an empty string"), + }, + }, + }) + }) + t.Run("required_check_should_be_required_when_strict_required_status_checks_policy_is_set", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + resourceName := "test-required-check-is-required" + config := fmt.Sprintf(` + resource "github_organization_ruleset" "%s" { + name = "test-required-with-%s" + target = "branch" + enforcement = "active" + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + required_status_checks { + strict_required_status_checks_policy = true + } + } + } + `, resourceName, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("Insufficient required_check blocks"), + }, + }, + }) + }) + }) } func TestOrganizationPushRulesetSupport(t *testing.T) { @@ -618,3 +941,204 @@ func TestOrganizationPushRulesetSupport(t *testing.T) { t.Errorf("Expected 4 restricted file extensions, got %d", len(restrictedExts)) } } + +func TestAccGithubOrganizationRuleset_requiredReviewers(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + teamName := fmt.Sprintf("%steam-req-rev-%s", testResourcePrefix, randomID) + rulesetName := fmt.Sprintf("%s-ruleset-req-rev-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_team" "test" { + name = "%s" +} + +resource "github_organization_ruleset" "test" { + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + pull_request { + allowed_merge_methods = ["merge", "squash"] + required_approving_review_count = 1 + + required_reviewers { + reviewer { + id = github_team.test.id + type = "Team" + } + file_patterns = ["*.go", "src/**/*.ts"] + minimum_approvals = 1 + } + } + } +} +`, teamName, rulesetName) + + // Updated config: change minimum_approvals from 1 to 2 + configUpdated := fmt.Sprintf(` +resource "github_team" "test" { + name = "%s" +} + +resource "github_organization_ruleset" "test" { + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + repository_name { + include = 
["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + pull_request { + allowed_merge_methods = ["merge", "squash"] + required_approving_review_count = 1 + + required_reviewers { + reviewer { + id = github_team.test.id + type = "Team" + } + file_patterns = ["*.go", "src/**/*.ts"] + minimum_approvals = 2 + } + } + } +} +`, teamName, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("github_organization_ruleset.test", "name", rulesetName), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "target", "branch"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "enforcement", "active"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.#", "1"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.minimum_approvals", "1"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.file_patterns.#", "2"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.file_patterns.0", "*.go"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.file_patterns.1", "src/**/*.ts"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.reviewer.0.type", "Team"), + ), + }, + { + Config: configUpdated, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.minimum_approvals", "2"), + ), + }, + { + ResourceName: "github_organization_ruleset.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func TestAccGithubOrganizationRuleset_requiredReviewersMultiple(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + teamName1 := fmt.Sprintf("%steam-req-rev-1-%s", testResourcePrefix, randomID) + teamName2 := fmt.Sprintf("%steam-req-rev-2-%s", testResourcePrefix, randomID) + rulesetName := fmt.Sprintf("%s-ruleset-multi-rev-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_team" "test1" { + name = "%s" +} + +resource "github_team" "test2" { + name = "%s" +} + +resource "github_organization_ruleset" "test" { + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + pull_request { + allowed_merge_methods = ["merge", "squash"] + required_approving_review_count = 1 + + required_reviewers { + reviewer { + id = github_team.test1.id + type = "Team" + } + file_patterns = ["*.go"] + minimum_approvals = 1 + } + + required_reviewers { + reviewer { + id = github_team.test2.id + type = "Team" + } + file_patterns = ["*.md", "docs/**/*"] + minimum_approvals = 1 + } + } + } +} +`, teamName1, teamName2, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + Check: 
resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("github_organization_ruleset.test", "name", rulesetName), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "target", "branch"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "enforcement", "active"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.#", "2"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.minimum_approvals", "1"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.file_patterns.#", "1"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.file_patterns.0", "*.go"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.1.minimum_approvals", "1"), + resource.TestCheckResourceAttr("github_organization_ruleset.test", "rules.0.pull_request.0.required_reviewers.1.file_patterns.#", "2"), + ), + }, + }, + }) +} diff --git a/github/resource_github_repository.go b/github/resource_github_repository.go index 4d69afbb15..cc15985823 100644 --- a/github/resource_github_repository.go +++ b/github/resource_github_repository.go @@ -908,7 +908,7 @@ func resourceGithubRepositoryRead(ctx context.Context, d *schema.ResourceData, m func resourceGithubRepositoryUpdate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { // Can only update a repository if it is not archived or the update is to - // archive the repository (unarchiving is not supported by the GitHub API) + // archive the repository if d.Get("archived").(bool) && !d.HasChange("archived") { log.Printf("[INFO] Skipping update of archived repository") return nil @@ -1013,7 +1013,7 @@ func resourceGithubRepositoryUpdate(ctx context.Context, d *schema.ResourceData, } } - if d.HasChanges("visibility", "private") { + if d.HasChanges("visibility", "private") && !d.Get("archived").(bool) { repoReq.Visibility = github.Ptr(visibility) repoReq.AllowForking = allowForking diff --git a/github/resource_github_repository_ruleset.go b/github/resource_github_repository_ruleset.go index ebb5400a75..c22d8ced8d 100644 --- a/github/resource_github_repository_ruleset.go +++ b/github/resource_github_repository_ruleset.go @@ -10,7 +10,9 @@ import ( "strconv" "github.com/google/go-github/v81/github" + "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) @@ -27,6 +29,11 @@ func resourceGithubRepositoryRuleset() *schema.Resource { SchemaVersion: 1, + CustomizeDiff: customdiff.All( + validateRepositoryRulesetConditions, + validateRepositoryRulesetRules, + ), + Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, @@ -229,6 +236,7 @@ func resourceGithubRepositoryRuleset() *schema.Resource { Default: false, Description: "All conversations on code must be resolved before a pull request can be merged. 
Defaults to `false`.", }, + "required_reviewers": requiredReviewersSchema(), }, }, }, @@ -770,3 +778,38 @@ func resourceGithubRepositoryRulesetImport(ctx context.Context, d *schema.Resour return []*schema.ResourceData{d}, nil } + +// validateRepositoryRulesetConditions validates conditions based on target type. +func validateRepositoryRulesetConditions(ctx context.Context, d *schema.ResourceDiff, _ any) error { + target := d.Get("target").(string) + tflog.Debug(ctx, "Validating repository ruleset conditions", map[string]any{"target": target}) + + conditionsRaw := d.Get("conditions").([]any) + if len(conditionsRaw) == 0 { + tflog.Debug(ctx, "No conditions block, skipping validation") + return nil + } + + conditions := conditionsRaw[0].(map[string]any) + + switch target { + case "branch", "tag": + return validateRepositoryRulesetConditionsFieldForBranchAndTagTargets(ctx, target, conditions) + case "push": + return validateConditionsFieldForPushTarget(ctx, conditions) + } + return nil +} + +func validateRepositoryRulesetRules(ctx context.Context, d *schema.ResourceDiff, _ any) error { + target := d.Get("target").(string) + tflog.Debug(ctx, "Validating repository ruleset rules based on target", map[string]any{"target": target}) + + rulesRaw := d.Get("rules").([]any) + if len(rulesRaw) == 0 { + tflog.Debug(ctx, "No rules block, skipping validation") + return nil + } + + return validateRulesForTarget(ctx, d) +} diff --git a/github/resource_github_repository_ruleset_test.go b/github/resource_github_repository_ruleset_test.go index 54161a0af0..6fb8e45461 100644 --- a/github/resource_github_repository_ruleset_test.go +++ b/github/resource_github_repository_ruleset_test.go @@ -13,6 +13,13 @@ import ( ) func TestAccGithubRepositoryRuleset(t *testing.T) { + baseRepoVisibility := "public" + + if testAccConf.authMode == enterprise { + // This enables repos to be created even in GHEC EMU + baseRepoVisibility = "private" + } + t.Run("create_branch_ruleset", func(t *testing.T) { randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) repoName := fmt.Sprintf("%srepo-ruleset-%s", testResourcePrefix, randomID) @@ -23,6 +30,8 @@ resource "github_repository" "test" { auto_init = true default_branch = "main" vulnerability_alerts = true + + visibility = "%s" } resource "github_repository_environment" "example" { @@ -80,34 +89,36 @@ resource "github_repository_ruleset" "test" { required_signatures = false pull_request { - dismiss_stale_reviews_on_push = true - require_code_owner_review = true - require_last_push_approval = true + allowed_merge_methods = ["merge", "squash", "rebase"] required_approving_review_count = 2 required_review_thread_resolution = true + require_code_owner_review = true + dismiss_stale_reviews_on_push = true + require_last_push_approval = true } required_status_checks { - do_not_enforce_on_create = true - strict_required_status_checks_policy = true required_check { context = "ci" } - } - non_fast_forward = true + strict_required_status_checks_policy = true + do_not_enforce_on_create = true + } required_code_scanning { required_code_scanning_tool { - alerts_threshold = "errors" - security_alerts_threshold = "high_or_higher" - tool = "CodeQL" + alerts_threshold = "errors" + security_alerts_threshold = "high_or_higher" + tool = "CodeQL" } } + + non_fast_forward = true } } -`, repoName) +`, repoName, baseRepoVisibility) resource.Test(t, 
resource.TestCase{ PreCheck: func() { skipUnauthenticated(t) }, @@ -143,6 +154,8 @@ resource "github_repository_ruleset" "test" { name = "%s" auto_init = false vulnerability_alerts = true + + visibility = "%s" } resource "github_repository_environment" "example" { @@ -172,10 +185,10 @@ resource "github_repository_ruleset" "test" { } } } -`, repoName) +`, repoName, baseRepoVisibility) resource.Test(t, resource.TestCase{ - PreCheck: func() { skipUnlessMode(t, enterprise) }, + PreCheck: func() { skipUnauthenticated(t) }, ProviderFactories: providerFactories, Steps: []resource.TestStep{ { @@ -224,7 +237,7 @@ resource "github_repository_ruleset" "test" { } max_file_size { - max_file_size = 1048576 + max_file_size = 99 } file_extension_restriction { @@ -236,7 +249,7 @@ resource "github_repository_ruleset" "test" { `, repoName) resource.Test(t, resource.TestCase{ - PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + PreCheck: func() { skipUnlessEnterprise(t) }, Providers: testAccProviders, Steps: []resource.TestStep{ { @@ -245,14 +258,14 @@ resource "github_repository_ruleset" "test" { resource.TestCheckResourceAttr("github_repository_ruleset.test", "name", "test-push"), resource.TestCheckResourceAttr("github_repository_ruleset.test", "target", "push"), resource.TestCheckResourceAttr("github_repository_ruleset.test", "enforcement", "active"), - resource.TestCheckResourceAttr("github_organization_ruleset.test", "bypass_actors.#", "2"), - resource.TestCheckResourceAttr("github_organization_ruleset.test", "bypass_actors.0.actor_type", "DeployKey"), - resource.TestCheckResourceAttr("github_organization_ruleset.test", "bypass_actors.0.bypass_mode", "always"), - resource.TestCheckResourceAttr("github_organization_ruleset.test", "bypass_actors.1.actor_id", "5"), - resource.TestCheckResourceAttr("github_organization_ruleset.test", "bypass_actors.1.actor_type", "RepositoryRole"), - resource.TestCheckResourceAttr("github_organization_ruleset.test", "bypass_actors.1.bypass_mode", "always"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.#", "2"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.0.actor_type", "DeployKey"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.0.bypass_mode", "always"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.1.actor_id", "5"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.1.actor_type", "RepositoryRole"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.1.bypass_mode", "always"), resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.file_path_restriction.0.restricted_file_paths.0", "test.txt"), - resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.max_file_size.0.max_file_size", "1048576"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.max_file_size.0.max_file_size", "99"), resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.file_extension_restriction.0.restricted_file_extensions.0", "*.zip"), ), }, @@ -263,18 +276,20 @@ resource "github_repository_ruleset" "test" { t.Run("update_ruleset_name", func(t *testing.T) { randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) repoName := fmt.Sprintf("%srepo-ruleset-rename-%s", testResourcePrefix, randomID) - name := fmt.Sprintf(`ruleset-%[1]s`, randomID) - nameUpdated := fmt.Sprintf(`%[1]s-renamed`, randomID) + name := 
fmt.Sprintf("ruleset-%s", randomID) + nameUpdated := fmt.Sprintf("%s-renamed", name) config := ` resource "github_repository" "test" { - name = "%[1]s" - description = "Terraform acceptance tests %[2]s" + name = "%s" + description = "Terraform acceptance tests %s" vulnerability_alerts = true + + visibility = "%s" } resource "github_repository_ruleset" "test" { - name = "%[3]s" + name = "%s" repository = github_repository.test.id target = "branch" enforcement = "active" @@ -290,13 +305,13 @@ resource "github_repository_ruleset" "test" { ProviderFactories: providerFactories, Steps: []resource.TestStep{ { - Config: fmt.Sprintf(config, repoName, randomID, name), + Config: fmt.Sprintf(config, repoName, randomID, baseRepoVisibility, name), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("github_repository_ruleset.test", "name", name), ), }, { - Config: fmt.Sprintf(config, repoName, randomID, nameUpdated), + Config: fmt.Sprintf(config, repoName, randomID, baseRepoVisibility, nameUpdated), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("github_repository_ruleset.test", "name", nameUpdated), ), @@ -309,48 +324,25 @@ resource "github_repository_ruleset" "test" { randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) repoName := fmt.Sprintf("%srepo-ruleset-bypass-%s", testResourcePrefix, randomID) - config := fmt.Sprintf(` -resource "github_repository" "test" { - name = "%s" - description = "Terraform acceptance tests %[1]s" - auto_init = true + bypassActorsConfig := ` +bypass_actors { + actor_type = "DeployKey" + bypass_mode = "always" } -resource "github_repository_ruleset" "test" { - name = "test-bypass" - repository = github_repository.test.id - target = "branch" - enforcement = "active" - - bypass_actors { - actor_type = "DeployKey" - bypass_mode = "always" - } - - bypass_actors { - actor_id = 5 - actor_type = "RepositoryRole" - bypass_mode = "always" - } - - conditions { - ref_name { - include = ["~ALL"] - exclude = [] - } - } - - rules { - creation = true - } +bypass_actors { + actor_id = 5 + actor_type = "RepositoryRole" + bypass_mode = "always" } -`, repoName) - - configUpdated := fmt.Sprintf(` +` + baseConfig := ` resource "github_repository" "test" { name = "%s" description = "Terraform acceptance tests %[1]s" auto_init = true + + visibility = "%s" } resource "github_repository_ruleset" "test" { @@ -359,6 +351,8 @@ resource "github_repository_ruleset" "test" { target = "branch" enforcement = "active" + %s + conditions { ref_name { include = ["~ALL"] @@ -370,11 +364,13 @@ resource "github_repository_ruleset" "test" { creation = true } } -`, repoName) +` + config := fmt.Sprintf(baseConfig, repoName, baseRepoVisibility, bypassActorsConfig) + configUpdated := fmt.Sprintf(baseConfig, repoName, baseRepoVisibility, "") resource.Test(t, resource.TestCase{ - PreCheck: func() { skipUnauthenticated(t) }, - Providers: testAccProviders, + PreCheck: func() { skipUnauthenticated(t) }, + ProviderFactories: providerFactories, Steps: []resource.TestStep{ { Config: config, @@ -399,11 +395,13 @@ resource "github_repository_ruleset" "test" { bypassMode := "always" bypassModeUpdated := "exempt" - config := fmt.Sprintf(` + config := ` resource "github_repository" "test" { name = "%s" description = "Terraform acceptance tests %s" auto_init = true + + visibility = "%s" } resource "github_repository_ruleset" "test" { @@ -429,20 +427,20 @@ resource "github_repository_ruleset" "test" { creation = true } } -`, repoName, randomID, bypassMode) +` resource.Test(t, 
resource.TestCase{ PreCheck: func() { skipUnauthenticated(t) }, Providers: testAccProviders, Steps: []resource.TestStep{ { - Config: fmt.Sprintf(config, randomID, bypassMode), + Config: fmt.Sprintf(config, repoName, randomID, baseRepoVisibility, bypassMode), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.0.bypass_mode", bypassMode), ), }, { - Config: fmt.Sprintf(config, randomID, bypassModeUpdated), + Config: fmt.Sprintf(config, repoName, randomID, baseRepoVisibility, bypassModeUpdated), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("github_repository_ruleset.test", "bypass_actors.0.bypass_mode", bypassModeUpdated), ), @@ -461,7 +459,9 @@ resource "github_repository_ruleset" "test" { description = "Terraform acceptance tests %s" auto_init = true default_branch = "main" - vulnerability_alerts = true + vulnerability_alerts = true + + visibility = "%s" } resource "github_repository_environment" "example" { @@ -486,7 +486,7 @@ resource "github_repository_ruleset" "test" { creation = true } } - `, repoName, randomID) + `, repoName, randomID, baseRepoVisibility) resource.Test(t, resource.TestCase{ PreCheck: func() { skipUnauthenticated(t) }, @@ -508,6 +508,13 @@ resource "github_repository_ruleset" "test" { } func TestAccGithubRepositoryRulesetArchived(t *testing.T) { + baseRepoVisibility := "public" + + if testAccConf.authMode == enterprise { + // This enables repos to be created even in GHEC EMU + baseRepoVisibility = "private" + } + t.Run("skips update and delete on archived repository", func(t *testing.T) { randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) repoName := fmt.Sprintf("%srepo-ruleset-arch-%s", testResourcePrefix, randomID) @@ -516,6 +523,8 @@ func TestAccGithubRepositoryRulesetArchived(t *testing.T) { name = "%s" auto_init = true archived = false + + visibility = "%s" } resource "github_repository_ruleset" "test" { @@ -525,11 +534,11 @@ func TestAccGithubRepositoryRulesetArchived(t *testing.T) { enforcement = "active" rules { creation = true } } - `, repoName) + `, repoName, baseRepoVisibility) resource.Test(t, resource.TestCase{ - PreCheck: func() { skipUnlessMode(t, individual) }, - Providers: testAccProviders, + PreCheck: func() { skipUnauthenticated(t) }, + ProviderFactories: providerFactories, Steps: []resource.TestStep{ {Config: config}, {Config: strings.Replace(config, "archived = false", "archived = true", 1)}, @@ -546,6 +555,8 @@ func TestAccGithubRepositoryRulesetArchived(t *testing.T) { name = "%s" auto_init = true archived = true + + visibility = "%s" } resource "github_repository_ruleset" "test" { name = "test" @@ -554,11 +565,11 @@ func TestAccGithubRepositoryRulesetArchived(t *testing.T) { enforcement = "active" rules { creation = true } } - `, repoName) + `, repoName, baseRepoVisibility) resource.Test(t, resource.TestCase{ - PreCheck: func() { skipUnlessMode(t, individual) }, - Providers: testAccProviders, + PreCheck: func() { skipUnauthenticated(t) }, + ProviderFactories: providerFactories, Steps: []resource.TestStep{ {Config: config, ExpectError: regexp.MustCompile("cannot create ruleset on archived repository")}, }, @@ -566,6 +577,330 @@ func TestAccGithubRepositoryRulesetArchived(t *testing.T) { }) } +func TestAccGithubRepositoryRulesetValidation(t *testing.T) { + t.Run("Validates push target rejects ref_name condition", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + + config := fmt.Sprintf(` + 
resource "github_repository" "test" { + name = "tf-acc-test-push-ref-%s" + auto_init = true + visibility = "private" + vulnerability_alerts = true + } + + resource "github_repository_ruleset" "test" { + name = "test-push-with-ref" + repository = github_repository.test.id + target = "push" + enforcement = "active" + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + max_file_size { + max_file_size = 100 + } + } + } + `, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("ref_name must not be set for push target"), + }, + }, + }) + }) + + t.Run("Validates push target rejects branch/tag rules", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + config := fmt.Sprintf(` + resource "github_repository" "test" { + name = "tf-acc-test-push-rules-%s" + auto_init = true + visibility = "private" + vulnerability_alerts = true + } + + resource "github_repository_ruleset" "test" { + name = "test-push-branch-rule" + repository = github_repository.test.id + target = "push" + enforcement = "active" + + rules { + # 'creation' is a branch/tag rule, not valid for push target + creation = true + } + } + `, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("rule .* is not valid for push target"), + }, + }, + }) + }) + + t.Run("Validates branch target rejects push-only rules", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + config := fmt.Sprintf(` + resource "github_repository" "test" { + name = "tf-acc-test-branch-push-%s" + auto_init = true + vulnerability_alerts = true + + visibility = "private" + } + + resource "github_repository_ruleset" "test" { + name = "test-branch-push-rule" + repository = github_repository.test.id + target = "branch" + enforcement = "active" + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + # 'max_file_size' is a push-only rule, not valid for branch target + max_file_size { + max_file_size = 100 + } + } + } + `, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile("rule .* is not valid for branch target"), + }, + }, + }) + }) + + t.Run("Validates tag target rejects push-only rules", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + config := fmt.Sprintf(` + resource "github_repository" "test" { + name = "tf-acc-test-tag-push-%s" + auto_init = true + vulnerability_alerts = true + + visibility = "private" + } + + resource "github_repository_ruleset" "test" { + name = "test-tag-push-rule" + repository = github_repository.test.id + target = "tag" + enforcement = "active" + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + # 'file_path_restriction' is a push-only rule, not valid for tag target + file_path_restriction { + restricted_file_paths = ["secrets/"] + } + } + } + `, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasPaidOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ 
+ { + Config: config, + ExpectError: regexp.MustCompile("rule .* is not valid for tag target"), + }, + }, + }) + }) +} + +func TestAccGithubRepositoryRuleset_requiredReviewers(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + repoName := fmt.Sprintf("%srepo-ruleset-req-rev-%s", testResourcePrefix, randomID) + teamName := fmt.Sprintf("%steam-req-rev-%s", testResourcePrefix, randomID) + rulesetName := fmt.Sprintf("%s-ruleset-req-rev-%s", testResourcePrefix, randomID) + baseRepoVisibility := "public" + + if testAccConf.authMode == enterprise { + // This enables repos to be created even in GHEC EMU + baseRepoVisibility = "private" + } + + config := fmt.Sprintf(` +resource "github_repository" "test" { + name = "%s" + auto_init = true + visibility = "%s" + + ignore_vulnerability_alerts_during_read = true +} + +resource "github_team" "test" { + name = "%s" +} + +resource "github_team_repository" "test" { + team_id = github_team.test.id + repository = github_repository.test.name + permission = "push" +} + +resource "github_repository_ruleset" "test" { + name = "%s" + repository = github_repository.test.name + target = "branch" + enforcement = "active" + + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + pull_request { + allowed_merge_methods = ["merge", "squash"] + required_approving_review_count = 1 + + required_reviewers { + reviewer { + id = github_team.test.id + type = "Team" + } + file_patterns = ["*.go"] + minimum_approvals = 1 + } + } + } + + depends_on = [github_team_repository.test] +} +`, repoName, baseRepoVisibility, teamName, rulesetName) + + // Updated config: change minimum_approvals from 1 to 2 + configUpdated := fmt.Sprintf(` +resource "github_repository" "test" { + name = "%s" + auto_init = true + visibility = "%s" + + ignore_vulnerability_alerts_during_read = true +} + +resource "github_team" "test" { + name = "%s" +} + +resource "github_team_repository" "test" { + team_id = github_team.test.id + repository = github_repository.test.name + permission = "push" +} + +resource "github_repository_ruleset" "test" { + name = "%s" + repository = github_repository.test.name + target = "branch" + enforcement = "active" + + + conditions { + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + pull_request { + allowed_merge_methods = ["merge", "squash"] + required_approving_review_count = 1 + + required_reviewers { + reviewer { + id = github_team.test.id + type = "Team" + } + file_patterns = ["*.go"] + minimum_approvals = 2 + } + } + } + + depends_on = [github_team_repository.test] +} +`, repoName, baseRepoVisibility, teamName, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessHasOrgs(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("github_repository_ruleset.test", "name", rulesetName), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "target", "branch"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "enforcement", "active"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.pull_request.0.required_reviewers.#", "1"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.minimum_approvals", "1"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.file_patterns.#", 
"1"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.file_patterns.0", "*.go"), + resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.reviewer.0.type", "Team"), + ), + }, + { + Config: configUpdated, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("github_repository_ruleset.test", "rules.0.pull_request.0.required_reviewers.0.minimum_approvals", "2"), + ), + }, + { + ResourceName: "github_repository_ruleset.test", + ImportState: true, + ImportStateVerify: true, + ImportStateIdFunc: importRepositoryRulesetByResourcePaths("github_repository.test", "github_repository_ruleset.test"), + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + func importRepositoryRulesetByResourcePaths(repoLogicalName, rulesetLogicalName string) resource.ImportStateIdFunc { // test importing using an ID of the form : return func(s *terraform.State) (string, error) { diff --git a/github/util_rules.go b/github/util_rules.go index 734801598f..a73c47d679 100644 --- a/github/util_rules.go +++ b/github/util_rules.go @@ -1,17 +1,67 @@ package github import ( + "context" "log" "reflect" "sort" "github.com/google/go-github/v81/github" + "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) // This is a workaround for the SDK not setting the default value for the allowed_merge_methods field. var defaultPullRequestMergeMethods = []github.PullRequestMergeMethod{github.PullRequestMergeMethodMerge, github.PullRequestMergeMethodRebase, github.PullRequestMergeMethodSquash} +// requiredReviewersSchema returns the schema definition for required_reviewers block. +// This is shared between organization and repository rulesets. +func requiredReviewersSchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + Description: "Require specific reviewers to approve pull requests targeting matching branches. Note: This feature is in beta and subject to change.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "reviewer": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Description: "The reviewer that must review matching files.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "id": { + Type: schema.TypeInt, + Required: true, + Description: "The ID of the reviewer that must review.", + }, + "type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: toDiagFunc(validation.StringInSlice([]string{"Team"}, false), "type"), + Description: "The type of reviewer. Currently only `Team` is supported.", + }, + }, + }, + }, + "file_patterns": { + Type: schema.TypeList, + Required: true, + MinItems: 1, + Description: "File patterns (fnmatch syntax) that this reviewer must approve.", + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "minimum_approvals": { + Type: schema.TypeInt, + Required: true, + Description: "Minimum number of approvals required from this reviewer. Set to 0 to make approval optional.", + }, + }, + }, + } +} + // Helper function to safely convert interface{} to int, handling both int and float64. func toInt(v any) int { switch val := v.(type) { @@ -55,6 +105,72 @@ func toPullRequestMergeMethods(input any) []github.PullRequestMergeMethod { return mergeMethods } +// expandRequiredReviewers converts Terraform schema data to go-github RequiredReviewers. 
+func expandRequiredReviewers(input []any) []*github.RulesetRequiredReviewer { + if len(input) == 0 { + return nil + } + + reviewers := make([]*github.RulesetRequiredReviewer, 0, len(input)) + for _, item := range input { + reviewerMap := item.(map[string]any) + + var reviewer *github.RulesetReviewer + if rv, ok := reviewerMap["reviewer"].([]any); ok && len(rv) != 0 { + reviewerData := rv[0].(map[string]any) + reviewerType := github.RulesetReviewerType(reviewerData["type"].(string)) + reviewer = &github.RulesetReviewer{ + ID: github.Ptr(int64(reviewerData["id"].(int))), + Type: &reviewerType, + } + } + + filePatterns := make([]string, 0) + if fp, ok := reviewerMap["file_patterns"].([]any); ok { + for _, p := range fp { + filePatterns = append(filePatterns, p.(string)) + } + } + + reviewers = append(reviewers, &github.RulesetRequiredReviewer{ + MinimumApprovals: github.Ptr(reviewerMap["minimum_approvals"].(int)), + FilePatterns: filePatterns, + Reviewer: reviewer, + }) + } + return reviewers +} + +// flattenRequiredReviewers converts go-github RequiredReviewers to Terraform schema data. +func flattenRequiredReviewers(reviewers []*github.RulesetRequiredReviewer) []map[string]any { + if len(reviewers) == 0 { + return nil + } + + reviewersList := make([]map[string]any, 0, len(reviewers)) + for _, rr := range reviewers { + reviewerMap := map[string]any{ + "file_patterns": rr.FilePatterns, + "minimum_approvals": 0, + } + if rr.MinimumApprovals != nil { + reviewerMap["minimum_approvals"] = *rr.MinimumApprovals + } + if rr.Reviewer != nil { + reviewerData := map[string]any{} + if rr.Reviewer.ID != nil { + reviewerData["id"] = int(*rr.Reviewer.ID) + } + if rr.Reviewer.Type != nil { + reviewerData["type"] = string(*rr.Reviewer.Type) + } + reviewerMap["reviewer"] = []map[string]any{reviewerData} + } + reviewersList = append(reviewersList, reviewerMap) + } + return reviewersList +} + func resourceGithubRulesetObject(d *schema.ResourceData, org string) github.RepositoryRuleset { isOrgLevel := len(org) > 0 @@ -221,19 +337,26 @@ func expandConditions(input []any, org bool) *github.RepositoryRulesetConditions } func flattenConditions(conditions *github.RepositoryRulesetConditions, org bool) []any { - if conditions == nil || conditions.RefName == nil { + return flattenConditionsWithContext(context.TODO(), conditions, org) +} + +func flattenConditionsWithContext(ctx context.Context, conditions *github.RepositoryRulesetConditions, org bool) []any { + if conditions == nil || reflect.DeepEqual(conditions, &github.RepositoryRulesetConditions{}) { + tflog.Debug(ctx, "Conditions are empty, returning empty list") return []any{} } conditionsMap := make(map[string]any) refNameSlice := make([]map[string]any, 0) - refNameSlice = append(refNameSlice, map[string]any{ - "include": conditions.RefName.Include, - "exclude": conditions.RefName.Exclude, - }) + if conditions.RefName != nil { + refNameSlice = append(refNameSlice, map[string]any{ + "include": conditions.RefName.Include, + "exclude": conditions.RefName.Exclude, + }) - conditionsMap["ref_name"] = refNameSlice + conditionsMap["ref_name"] = refNameSlice + } // org-only fields if org { @@ -325,6 +448,12 @@ func expandRules(input []any, org bool) *github.RepositoryRulesetRules { RequiredReviewThreadResolution: pullRequestMap["required_review_thread_resolution"].(bool), AllowedMergeMethods: toPullRequestMergeMethods(allowedMergeMethods), } + + // Add required reviewers if provided + if reqReviewers, ok := pullRequestMap["required_reviewers"].([]any); ok && 
len(reqReviewers) != 0 { + params.RequiredReviewers = expandRequiredReviewers(reqReviewers) + } + rulesetRules.PullRequest = params } @@ -565,8 +694,9 @@ func flattenRules(rules *github.RepositoryRulesetRules, org bool) []any { "required_approving_review_count": rules.PullRequest.RequiredApprovingReviewCount, "required_review_thread_resolution": rules.PullRequest.RequiredReviewThreadResolution, "allowed_merge_methods": rules.PullRequest.AllowedMergeMethods, + "required_reviewers": flattenRequiredReviewers(rules.PullRequest.RequiredReviewers), }) - log.Printf("[DEBUG] Flattened Pull Request rules slice request slice: %#v", pullRequestSlice) + log.Printf("[DEBUG] Flattened Pull Request rules slice: %#v", pullRequestSlice) rulesMap["pull_request"] = pullRequestSlice } diff --git a/github/util_rules_test.go b/github/util_rules_test.go index 9c09fa03d4..003ee04773 100644 --- a/github/util_rules_test.go +++ b/github/util_rules_test.go @@ -418,3 +418,344 @@ func TestCompletePushRulesetSupport(t *testing.T) { t.Errorf("Expected 3 restricted file extensions, got %d", len(restrictedExts)) } } + +func TestFlattenConditions_PushRuleset_WithRepositoryNameOnly(t *testing.T) { + // Push rulesets don't use ref_name - they only have repository_name or repository_id. + // flattenConditions should return the conditions even when RefName is nil. + conditions := &github.RepositoryRulesetConditions{ + RefName: nil, // Push rulesets don't have ref_name + RepositoryName: &github.RepositoryRulesetRepositoryNamesConditionParameters{ + Include: []string{"~ALL"}, + Exclude: []string{}, + }, + } + + result := flattenConditions(conditions, true) // org=true for organization rulesets + + if len(result) != 1 { + t.Fatalf("Expected 1 conditions block, got %d", len(result)) + } + + conditionsMap := result[0].(map[string]any) + + // ref_name should be empty for push rulesets + refNameSlice := conditionsMap["ref_name"] + if refNameSlice != nil { + t.Fatalf("Expected ref_name to be nil, got %T", conditionsMap["ref_name"]) + } + + // repository_name should be present + repoNameSlice, ok := conditionsMap["repository_name"].([]map[string]any) + if !ok { + t.Fatalf("Expected repository_name to be []map[string]any, got %T", conditionsMap["repository_name"]) + } + if len(repoNameSlice) != 1 { + t.Fatalf("Expected 1 repository_name block, got %d", len(repoNameSlice)) + } + + include, ok := repoNameSlice[0]["include"].([]string) + if !ok { + t.Fatalf("Expected include to be []string, got %T", repoNameSlice[0]["include"]) + } + if len(include) != 1 || include[0] != "~ALL" { + t.Errorf("Expected include to be [~ALL], got %v", include) + } +} + +func TestFlattenConditions_BranchRuleset_WithRefNameAndRepositoryName(t *testing.T) { + // Branch/tag rulesets have both ref_name and repository_name. + // This test ensures we didn't break the existing behavior. 
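+	// Both the ref_name and repository_name blocks below should survive flattening unchanged.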
+ conditions := &github.RepositoryRulesetConditions{ + RefName: &github.RepositoryRulesetRefConditionParameters{ + Include: []string{"~DEFAULT_BRANCH", "refs/heads/main"}, + Exclude: []string{"refs/heads/experimental-*"}, + }, + RepositoryName: &github.RepositoryRulesetRepositoryNamesConditionParameters{ + Include: []string{"~ALL"}, + Exclude: []string{"test-*"}, + }, + } + + result := flattenConditions(conditions, true) // org=true for organization rulesets + + if len(result) != 1 { + t.Fatalf("Expected 1 conditions block, got %d", len(result)) + } + + conditionsMap := result[0].(map[string]any) + + // ref_name should be present for branch/tag rulesets + refNameSlice, ok := conditionsMap["ref_name"].([]map[string]any) + if !ok { + t.Fatalf("Expected ref_name to be []map[string]any, got %T", conditionsMap["ref_name"]) + } + if len(refNameSlice) != 1 { + t.Fatalf("Expected 1 ref_name block, got %d", len(refNameSlice)) + } + + refInclude, ok := refNameSlice[0]["include"].([]string) + if !ok { + t.Fatalf("Expected ref_name include to be []string, got %T", refNameSlice[0]["include"]) + } + if len(refInclude) != 2 { + t.Errorf("Expected 2 ref_name includes, got %d", len(refInclude)) + } + + refExclude, ok := refNameSlice[0]["exclude"].([]string) + if !ok { + t.Fatalf("Expected ref_name exclude to be []string, got %T", refNameSlice[0]["exclude"]) + } + if len(refExclude) != 1 { + t.Errorf("Expected 1 ref_name exclude, got %d", len(refExclude)) + } + + // repository_name should also be present + repoNameSlice, ok := conditionsMap["repository_name"].([]map[string]any) + if !ok { + t.Fatalf("Expected repository_name to be []map[string]any, got %T", conditionsMap["repository_name"]) + } + if len(repoNameSlice) != 1 { + t.Fatalf("Expected 1 repository_name block, got %d", len(repoNameSlice)) + } + + repoInclude, ok := repoNameSlice[0]["include"].([]string) + if !ok { + t.Fatalf("Expected repository_name include to be []string, got %T", repoNameSlice[0]["include"]) + } + if len(repoInclude) != 1 || repoInclude[0] != "~ALL" { + t.Errorf("Expected repository_name include to be [~ALL], got %v", repoInclude) + } + + repoExclude, ok := repoNameSlice[0]["exclude"].([]string) + if !ok { + t.Fatalf("Expected repository_name exclude to be []string, got %T", repoNameSlice[0]["exclude"]) + } + if len(repoExclude) != 1 || repoExclude[0] != "test-*" { + t.Errorf("Expected repository_name exclude to be [test-*], got %v", repoExclude) + } +} + +func TestFlattenConditions_PushRuleset_WithRepositoryIdOnly(t *testing.T) { + // Push rulesets can also use repository_id instead of repository_name. 
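+	// Only repository_id is populated; ref_name stays nil, as it does for push rulesets.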
+ conditions := &github.RepositoryRulesetConditions{ + RefName: nil, // Push rulesets don't have ref_name + RepositoryID: &github.RepositoryRulesetRepositoryIDsConditionParameters{ + RepositoryIDs: []int64{12345, 67890}, + }, + } + + result := flattenConditions(conditions, true) // org=true for organization rulesets + + if len(result) != 1 { + t.Fatalf("Expected 1 conditions block, got %d", len(result)) + } + + conditionsMap := result[0].(map[string]any) + + // ref_name should be nil for push rulesets + refNameSlice := conditionsMap["ref_name"] + if refNameSlice != nil { + t.Fatalf("Expected ref_name to be nil, got %T", conditionsMap["ref_name"]) + } + + // repository_id should be present + repoIDs, ok := conditionsMap["repository_id"].([]int64) + if !ok { + t.Fatalf("Expected repository_id to be []int64, got %T", conditionsMap["repository_id"]) + } + if len(repoIDs) != 2 { + t.Fatalf("Expected 2 repository IDs, got %d", len(repoIDs)) + } + if repoIDs[0] != 12345 || repoIDs[1] != 67890 { + t.Errorf("Expected repository IDs [12345, 67890], got %v", repoIDs) + } +} + +func TestExpandRequiredReviewers(t *testing.T) { + input := []any{ + map[string]any{ + "reviewer": []any{ + map[string]any{ + "id": 12345, + "type": "Team", + }, + }, + "file_patterns": []any{"*.go", "src/**/*.ts"}, + "minimum_approvals": 2, + }, + map[string]any{ + "reviewer": []any{ + map[string]any{ + "id": 67890, + "type": "Team", + }, + }, + "file_patterns": []any{"docs/**/*.md"}, + "minimum_approvals": 1, + }, + } + + result := expandRequiredReviewers(input) + + if len(result) != 2 { + t.Fatalf("Expected 2 reviewers, got %d", len(result)) + } + + // Check first reviewer + if result[0].Reviewer == nil { + t.Fatal("Expected first reviewer to have a Reviewer") + } + if *result[0].Reviewer.ID != 12345 { + t.Errorf("Expected first reviewer ID to be 12345, got %d", *result[0].Reviewer.ID) + } + if *result[0].Reviewer.Type != github.RulesetReviewerTypeTeam { + t.Errorf("Expected first reviewer type to be Team, got %s", *result[0].Reviewer.Type) + } + if *result[0].MinimumApprovals != 2 { + t.Errorf("Expected first reviewer minimum approvals to be 2, got %d", *result[0].MinimumApprovals) + } + if len(result[0].FilePatterns) != 2 { + t.Fatalf("Expected first reviewer to have 2 file patterns, got %d", len(result[0].FilePatterns)) + } + if result[0].FilePatterns[0] != "*.go" || result[0].FilePatterns[1] != "src/**/*.ts" { + t.Errorf("Unexpected file patterns for first reviewer: %v", result[0].FilePatterns) + } + + // Check second reviewer + if result[1].Reviewer == nil { + t.Fatal("Expected second reviewer to have a Reviewer") + } + if *result[1].Reviewer.ID != 67890 { + t.Errorf("Expected second reviewer ID to be 67890, got %d", *result[1].Reviewer.ID) + } + if *result[1].MinimumApprovals != 1 { + t.Errorf("Expected second reviewer minimum approvals to be 1, got %d", *result[1].MinimumApprovals) + } +} + +func TestExpandRequiredReviewersEmpty(t *testing.T) { + result := expandRequiredReviewers([]any{}) + if result != nil { + t.Error("Expected nil for empty input") + } + + result = expandRequiredReviewers(nil) + if result != nil { + t.Error("Expected nil for nil input") + } +} + +func TestFlattenRequiredReviewers(t *testing.T) { + reviewerType := github.RulesetReviewerTypeTeam + reviewers := []*github.RulesetRequiredReviewer{ + { + MinimumApprovals: github.Ptr(2), + FilePatterns: []string{"*.go", "src/**/*.ts"}, + Reviewer: &github.RulesetReviewer{ + ID: github.Ptr(int64(12345)), + Type: &reviewerType, + }, + }, + { + 
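+			// Second reviewer: a single docs pattern requiring one approval.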
MinimumApprovals: github.Ptr(1), + FilePatterns: []string{"docs/**/*.md"}, + Reviewer: &github.RulesetReviewer{ + ID: github.Ptr(int64(67890)), + Type: &reviewerType, + }, + }, + } + + result := flattenRequiredReviewers(reviewers) + + if len(result) != 2 { + t.Fatalf("Expected 2 reviewers, got %d", len(result)) + } + + // Check first reviewer + if result[0]["minimum_approvals"] != 2 { + t.Errorf("Expected first reviewer minimum approvals to be 2, got %v", result[0]["minimum_approvals"]) + } + filePatterns := result[0]["file_patterns"].([]string) + if len(filePatterns) != 2 { + t.Fatalf("Expected first reviewer to have 2 file patterns, got %d", len(filePatterns)) + } + if filePatterns[0] != "*.go" || filePatterns[1] != "src/**/*.ts" { + t.Errorf("Unexpected file patterns for first reviewer: %v", filePatterns) + } + + reviewerBlock := result[0]["reviewer"].([]map[string]any) + if len(reviewerBlock) != 1 { + t.Fatalf("Expected 1 reviewer block, got %d", len(reviewerBlock)) + } + if reviewerBlock[0]["id"] != 12345 { + t.Errorf("Expected first reviewer ID to be 12345, got %v", reviewerBlock[0]["id"]) + } + if reviewerBlock[0]["type"] != "Team" { + t.Errorf("Expected first reviewer type to be Team, got %v", reviewerBlock[0]["type"]) + } + + // Check second reviewer + if result[1]["minimum_approvals"] != 1 { + t.Errorf("Expected second reviewer minimum approvals to be 1, got %v", result[1]["minimum_approvals"]) + } +} + +func TestFlattenRequiredReviewersEmpty(t *testing.T) { + result := flattenRequiredReviewers(nil) + if result != nil { + t.Error("Expected nil for nil input") + } + + result = flattenRequiredReviewers([]*github.RulesetRequiredReviewer{}) + if result != nil { + t.Error("Expected nil for empty slice input") + } +} + +func TestRoundTripRequiredReviewers(t *testing.T) { + // Start with Terraform-style input + input := []any{ + map[string]any{ + "reviewer": []any{ + map[string]any{ + "id": 12345, + "type": "Team", + }, + }, + "file_patterns": []any{"*.go", "src/**/*.ts"}, + "minimum_approvals": 2, + }, + } + + // Expand to go-github types + expanded := expandRequiredReviewers(input) + + // Flatten back to Terraform types + flattened := flattenRequiredReviewers(expanded) + + // Verify the round trip maintains data + if len(flattened) != 1 { + t.Fatalf("Expected 1 reviewer after round trip, got %d", len(flattened)) + } + + if flattened[0]["minimum_approvals"] != 2 { + t.Errorf("Expected minimum_approvals to be 2 after round trip, got %v", flattened[0]["minimum_approvals"]) + } + + filePatterns := flattened[0]["file_patterns"].([]string) + if len(filePatterns) != 2 { + t.Fatalf("Expected 2 file patterns after round trip, got %d", len(filePatterns)) + } + + reviewerBlock := flattened[0]["reviewer"].([]map[string]any) + if len(reviewerBlock) != 1 { + t.Fatalf("Expected 1 reviewer block after round trip, got %d", len(reviewerBlock)) + } + if reviewerBlock[0]["id"] != 12345 { + t.Errorf("Expected reviewer ID to be 12345 after round trip, got %v", reviewerBlock[0]["id"]) + } + if reviewerBlock[0]["type"] != "Team" { + t.Errorf("Expected reviewer type to be Team after round trip, got %v", reviewerBlock[0]["type"]) + } +} diff --git a/website/docs/r/organization_ruleset.html.markdown b/website/docs/r/organization_ruleset.html.markdown index e04118f678..3c31e9aa3b 100644 --- a/website/docs/r/organization_ruleset.html.markdown +++ b/website/docs/r/organization_ruleset.html.markdown @@ -65,24 +65,23 @@ resource "github_organization_ruleset" "example" { } } -# Example with push ruleset +# Example 
with push ruleset +# Note: Push targets must NOT have ref_name in conditions, only repository_name or repository_id resource "github_organization_ruleset" "example_push" { name = "example_push" target = "push" enforcement = "active" conditions { - ref_name { - include = ["~ALL"] - exclude = [] - } repository_name { - include = ["~ALL"] + include = ["~ALL"] exclude = [] } } rules { + # Push targets only support these rules: + # file_path_restriction, max_file_size, max_file_path_length, file_extension_restriction file_path_restriction { restricted_file_paths = [".github/workflows/*", "*.env"] } @@ -104,225 +103,247 @@ resource "github_organization_ruleset" "example_push" { ## Argument Reference -* `enforcement` - (Required) (String) Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`. +- `enforcement` - (Required) (String) Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`. -* `name` - (Required) (String) The name of the ruleset. +- `name` - (Required) (String) The name of the ruleset. -* `rules` - (Required) (Block List, Min: 1, Max: 1) Rules within the ruleset. (see [below for nested schema](#rules)) +- `rules` - (Required) (Block List, Min: 1, Max: 1) Rules within the ruleset. (see [below for nested schema](#rules)) -* `target` - (Required) (String) Possible values are `branch`, `tag` and `push`. +- `target` - (Required) (String) Possible values are `branch`, `tag` and `push`. -* `bypass_actors` - (Optional) (Block List) The actors that can bypass the rules in this ruleset. (see [below for nested schema](#bypass_actors)) +- `bypass_actors` - (Optional) (Block List) The actors that can bypass the rules in this ruleset. (see [below for nested schema](#bypass_actors)) -* `conditions` - (Optional) (Block List, Max: 1) Parameters for an organization ruleset condition. `ref_name` is required alongside one of `repository_name` or `repository_id`. (see [below for nested schema](#conditions)) +- `conditions` - (Optional) (Block List, Max: 1) Parameters for an organization ruleset condition. For `branch` and `tag` targets, `ref_name` is required alongside one of `repository_name` or `repository_id`. For `push` targets, `ref_name` must NOT be set - only `repository_name` or `repository_id` should be used. (see [below for nested schema](#conditions)) #### Rules #### The `rules` block supports the following: -* `branch_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the branch_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `tag_name_pattern` as it only applies to rulesets with target `branch`. (see [below for nested schema](#rules.branch_name_pattern)) +~> **Note:** Rules are target-specific. `branch` and `tag` targets support rules like `creation`, `deletion`, `pull_request`, `required_status_checks`, etc. `push` targets only support `file_path_restriction`, `max_file_size`, `max_file_path_length`, and `file_extension_restriction`. Using the wrong rules for a target will result in a validation error. -* `commit_author_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_author_email_pattern rule. 
This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rules.commit_author_email_pattern)) +- `branch_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the branch_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `tag_name_pattern` as it only applies to rulesets with target `branch`. (see [below for nested schema](#rules.branch_name_pattern)) -* `commit_message_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_message_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rules.commit_message_pattern)) +- `commit_author_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_author_email_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rules.commit_author_email_pattern)) -* `committer_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the committer_email_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rules.committer_email_pattern)) +- `commit_message_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_message_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rules.commit_message_pattern)) -* `creation` - (Optional) (Boolean) Only allow users with bypass permission to create matching refs. +- `committer_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the committer_email_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rules.committer_email_pattern)) -* `deletion` - (Optional) (Boolean) Only allow users with bypass permissions to delete matching refs. +- `creation` - (Optional) (Boolean) Only allow users with bypass permission to create matching refs. -* `non_fast_forward` - (Optional) (Boolean) Prevent users with push access from force pushing to branches. +- `deletion` - (Optional) (Boolean) Only allow users with bypass permissions to delete matching refs. -* `pull_request` - (Optional) (Block List, Max: 1) Require all commits be made to a non-target branch and submitted via a pull request before they can be merged. (see [below for nested schema](#rules.pull_request)) +- `non_fast_forward` - (Optional) (Boolean) Prevent users with push access from force pushing to branches. -* `required_linear_history` - (Optional) (Boolean) Prevent merge commits from being pushed to matching branches. +- `pull_request` - (Optional) (Block List, Max: 1) Require all commits be made to a non-target branch and submitted via a pull request before they can be merged. 
(see [below for nested schema](#rules.pull_request)) -* `required_signatures` - (Optional) (Boolean) Commits pushed to matching branches must have verified signatures. +- `required_linear_history` - (Optional) (Boolean) Prevent merge commits from being pushed to matching branches. -* `required_status_checks` - (Optional) (Block List, Max: 1) Choose which status checks must pass before branches can be merged into a branch that matches this rule. When enabled, commits must first be pushed to another branch, then merged or pushed directly to a branch that matches this rule after status checks have passed. (see [below for nested schema](#rules.required_status_checks)) +- `required_signatures` - (Optional) (Boolean) Commits pushed to matching branches must have verified signatures. -* `required_workflows` - (Optional) (Block List, Max: 1) Define which Actions workflows must pass before changes can be merged into a branch matching the rule. Multiple workflows can be specified. (see [below for nested schema](#rules.required_workflows)) +- `required_status_checks` - (Optional) (Block List, Max: 1) Choose which status checks must pass before branches can be merged into a branch that matches this rule. When enabled, commits must first be pushed to another branch, then merged or pushed directly to a branch that matches this rule after status checks have passed. (see [below for nested schema](#rules.required_status_checks)) -* `required_code_scanning` - (Optional) (Block List, Max: 1) Define which tools must provide code scanning results before the reference is updated. When configured, code scanning must be enabled and have results for both the commit and the reference being updated. Multiple code scanning tools can be specified. (see [below for nested schema](#rules.required_code_scanning)) +- `required_workflows` - (Optional) (Block List, Max: 1) Define which Actions workflows must pass before changes can be merged into a branch matching the rule. Multiple workflows can be specified. (see [below for nested schema](#rules.required_workflows)) -* `tag_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the tag_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `branch_name_pattern` as it only applies to rulesets with target `tag`. (see [below for nested schema](#rules.tag_name_pattern)) +- `required_code_scanning` - (Optional) (Block List, Max: 1) Define which tools must provide code scanning results before the reference is updated. When configured, code scanning must be enabled and have results for both the commit and the reference being updated. Multiple code scanning tools can be specified. (see [below for nested schema](#rules.required_code_scanning)) -* `file_path_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include changes to specified file paths from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.file_path_restriction)) +- `tag_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the tag_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `branch_name_pattern` as it only applies to rulesets with target `tag`. 
(see [below for nested schema](#rules.tag_name_pattern)) -* `max_file_size` - (Optional) (Block List, Max: 1) Prevent commits that include files with a specified file size from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.max_file_size)) +- `file_path_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include changes to specified file paths from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.file_path_restriction)) -* `max_file_path_length` - (Optional) (Block List, Max: 1) Prevent commits that include file paths that exceed a specified character limit from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.max_file_path_length)) +- `max_file_size` - (Optional) (Block List, Max: 1) Prevent commits that include files with a specified file size from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.max_file_size)) -* `file_extension_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include files with specified file extensions from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.file_extension_restriction)) +- `max_file_path_length` - (Optional) (Block List, Max: 1) Prevent commits that include file paths that exceed a specified character limit from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.max_file_path_length)) -* `update` - (Optional) (Boolean) Only allow users with bypass permission to update matching refs. +- `file_extension_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include files with specified file extensions from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.file_extension_restriction)) + +- `update` - (Optional) (Boolean) Only allow users with bypass permission to update matching refs. #### rules.branch_name_pattern #### -* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. -* `pattern` - (Required) (String) The pattern to match with. +- `pattern` - (Required) (String) The pattern to match with. -* `name` - (Optional) (String) How this rule will appear to users. +- `name` - (Optional) (String) How this rule will appear to users. -* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. #### rules.commit_author_email_pattern #### -* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. -* `pattern` - (Required) (String) The pattern to match with. +- `pattern` - (Required) (String) The pattern to match with. -* `name` - (Optional) (String) How this rule will appear to users. +- `name` - (Optional) (String) How this rule will appear to users. 
-* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. #### rules.commit_message_pattern #### -* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. -* `pattern` - (Required) (String) The pattern to match with. +- `pattern` - (Required) (String) The pattern to match with. -* `name` - (Optional) (String) How this rule will appear to users. +- `name` - (Optional) (String) How this rule will appear to users. -* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. #### rules.committer_email_pattern #### -* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. -* `pattern` - (Required) (String) The pattern to match with. +- `pattern` - (Required) (String) The pattern to match with. -* `name` - (Optional) (String) How this rule will appear to users. +- `name` - (Optional) (String) How this rule will appear to users. -* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. #### rules.pull_request #### -* `dismiss_stale_reviews_on_push` - (Optional) (Boolean) New, reviewable commits pushed will dismiss previous pull request review approvals. Defaults to `false`. +- `dismiss_stale_reviews_on_push` - (Optional) (Boolean) New, reviewable commits pushed will dismiss previous pull request review approvals. Defaults to `false`. + +- `require_code_owner_review` - (Optional) (Boolean) Require an approving review in pull requests that modify files that have a designated code owner. Defaults to `false`. + +- `require_last_push_approval` - (Optional) (Boolean) Whether the most recent reviewable push must be approved by someone other than the person who pushed it. Defaults to `false`. + +- `required_approving_review_count` - (Optional) (Number) The number of approving reviews that are required before a pull request can be merged. Defaults to `0`. -* `require_code_owner_review` - (Optional) (Boolean) Require an approving review in pull requests that modify files that have a designated code owner. Defaults to `false`. +- `required_review_thread_resolution` - (Optional) (Boolean) All conversations on code must be resolved before a pull request can be merged. Defaults to `false`. -* `require_last_push_approval` - (Optional) (Boolean) Whether the most recent reviewable push must be approved by someone other than the person who pushed it. Defaults to `false`. +- `allowed_merge_methods` - (Required) (List of String, Min: 1) Array of merge methods to be allowed. Allowed values include `merge`, `squash`, and `rebase`. At least one must be enabled. -* `required_approving_review_count` - (Optional) (Number) The number of approving reviews that are required before a pull request can be merged. Defaults to `0`. +- `required_reviewers` - (Optional) (Block List) Require specific reviewers to approve pull requests. Note: This feature is in beta. 
(see [below for nested schema](#rules.pull_request.required_reviewers)) -* `required_review_thread_resolution` - (Optional) (Boolean) All conversations on code must be resolved before a pull request can be merged. Defaults to `false`. +#### rules.pull_request.required_reviewers #### + +- `reviewer` - (Required) (Block List, Max: 1) The reviewer that must review matching files. (see [below for nested schema](#rules.pull_request.required_reviewers.reviewer)) + +- `file_patterns` - (Required) (List of String) File patterns (fnmatch syntax) that this reviewer must approve. + +- `minimum_approvals` - (Required) (Number) Minimum number of approvals required from this reviewer. Set to 0 to make approval optional. + +#### rules.pull_request.required_reviewers.reviewer #### + +- `id` - (Required) (Number) The ID of the reviewer (Team ID). + +- `type` - (Required) (String) The type of reviewer. Currently only `Team` is supported. #### rules.required_status_checks #### -* `required_check` - (Required) (Block Set, Min: 1) Status checks that are required. Several can be defined. (see [below for nested schema](#rules.required_status_checks.required_check)) +- `required_check` - (Required) (Block Set, Min: 1) Status checks that are required. Several can be defined. (see [below for nested schema](#rules.required_status_checks.required_check)) -* `strict_required_status_checks_policy` - (Optional) (Boolean) Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. Defaults to `false`. +- `strict_required_status_checks_policy` - (Optional) (Boolean) Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. Defaults to `false`. -* `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. +- `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. #### required_status_checks.required_check #### -* `context` - (Required) (String) The status check context name that must be present on the commit. - -* `integration_id` - (Optional) (Number) The optional integration ID that this status check must originate from. +- `context` - (Required) (String) The status check context name that must be present on the commit. -* `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. +- `integration_id` - (Optional) (Number) The optional integration ID that this status check must originate from. #### rules.required_workflows #### -* `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. +- `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. -* `required_workflow` - (Required) (Block Set, Min: 1) Actions workflows that are required. Multiple can be defined. (see [below for nested schema](#rules.required_workflows.required_workflow)) +- `required_workflow` - (Required) (Block Set, Min: 1) Actions workflows that are required. Multiple can be defined. 
(see [below for nested schema](#rules.required_workflows.required_workflow)) #### rules.required_workflows.required_workflow #### -* `repository_id` - (Required) (Number) The ID of the repository. Names, full names and repository URLs are not supported. +- `repository_id` - (Required) (Number) The ID of the repository. Names, full names and repository URLs are not supported. -* `path` - (Required) (String) The path to the YAML definition file of the workflow. +- `path` - (Required) (String) The path to the YAML definition file of the workflow. -* `ref` - (Optional) (String) The optional ref from which to fetch the workflow. Defaults to `master`. +- `ref` - (Optional) (String) The optional ref from which to fetch the workflow. Defaults to `master`. #### rules.required_code_scanning #### -* `required_code_scanning_tool` - (Required) (Block Set, Min: 1) Actions code scanning tools that are required. Multiple can be defined. (see [below for nested schema](#rules.required_workflows.required_code_scanning_tool)) +- `required_code_scanning_tool` - (Required) (Block Set, Min: 1) Actions code scanning tools that are required. Multiple can be defined. (see [below for nested schema](#rules.required_code_scanning.required_code_scanning_tool)) #### rules.required_code_scanning.required_code_scanning_tool #### -* `alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise alerts block a reference update. Can be one of: `none`, `errors`, `errors_and_warnings`, `all`. +- `alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise alerts block a reference update. Can be one of: `none`, `errors`, `errors_and_warnings`, `all`. -* `security_alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise security alerts block a reference update. Can be one of: `none`, `critical`, `high_or_higher`, `medium_or_higher`, `all`. +- `security_alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise security alerts block a reference update. Can be one of: `none`, `critical`, `high_or_higher`, `medium_or_higher`, `all`. -* `tool` - (Required) (String) The name of a code scanning tool. +- `tool` - (Required) (String) The name of a code scanning tool. #### rules.tag_name_pattern #### -* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. -* `pattern` - (Required) (String) The pattern to match with. +- `pattern` - (Required) (String) The pattern to match with. -* `name` - (Optional) (String) How this rule will appear to users. +- `name` - (Optional) (String) How this rule will appear to users. -* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. #### rules.file_path_restriction #### -* `restricted_file_paths` - (Required) (Block Set, Min: 1) The file paths that are restricted from being pushed to the commit graph. +- `restricted_file_paths` - (Required) (Block Set, Min: 1) The file paths that are restricted from being pushed to the commit graph. #### rules.max_file_size #### -* `max_file_size` - (Required) (Integer) The maximum allowed size, in megabytes (MB), of a file. Valid range is 1-100 MB. 
+- `max_file_size` - (Required) (Integer) The maximum allowed size, in megabytes (MB), of a file. Valid range is 1-100 MB. #### rules.max_file_path_length #### -* `max_file_path_length` - (Required) (Integer) The maximum number of characters allowed in file paths. +- `max_file_path_length` - (Required) (Integer) The maximum number of characters allowed in file paths. #### rules.file_extension_restriction #### -* `restricted_file_extensions` - (Required) (Block Set, Min: 1) The file extensions that are restricted from being pushed to the commit graph. +- `restricted_file_extensions` - (Required) (Block Set, Min: 1) The file extensions that are restricted from being pushed to the commit graph. #### bypass_actors #### -* `actor_id` - (Required) (Number) The ID of the actor that can bypass a ruleset. +- `actor_id` - (Optional) (Number) The ID of the actor that can bypass a ruleset. Some actor types such as `DeployKey` do not have an ID. -* `actor_type` (String) The type of actor that can bypass a ruleset. Can be one of: `RepositoryRole`, `Team`, `Integration`, `OrganizationAdmin`. +- `actor_type` (String) The type of actor that can bypass a ruleset. Can be one of: `RepositoryRole`, `Team`, `Integration`, `OrganizationAdmin`, `DeployKey`. -* `bypass_mode` - (Optional) (String) When the specified actor can bypass the ruleset. pull_request means that an actor can only bypass rules on pull requests. Can be one of: `always`, `pull_request`, `exempt`. +- `bypass_mode` - (Optional) (String) When the specified actor can bypass the ruleset. pull_request means that an actor can only bypass rules on pull requests. Can be one of: `always`, `pull_request`, `exempt`. ~>Note: at the time of writing this, the following actor types correspond to the following actor IDs: -* `OrganizationAdmin` -> `1` -* `RepositoryRole` (This is the actor type, the following are the base repository roles and their associated IDs.) - * `maintain` -> `2` - * `write` -> `4` - * `admin` -> `5` +- `OrganizationAdmin` -> `1` +- `RepositoryRole` (This is the actor type, the following are the base repository roles and their associated IDs.) + - `maintain` -> `2` + - `write` -> `4` + - `admin` -> `5` #### conditions #### -* `ref_name` - (Required) (Block List, Min: 1, Max: 1) (see [below for nested schema](#conditions.ref_name)) -* `repository_id` (Optional) (List of Number) The repository IDs that the ruleset applies to. One of these IDs must match for the condition to pass. Conflicts with `repository_name`. -* `repository_name` (Optional) (Block List, Max: 1) Conflicts with `repository_id`. (see [below for nested schema](#conditions.repository_name)) +- `ref_name` - (Optional) (Block List, Max: 1) Required for `branch` and `tag` targets. Must NOT be set for `push` targets. (see [below for nested schema](#conditions.ref_name)) +- `repository_id` (Optional) (List of Number) The repository IDs that the ruleset applies to. One of these IDs must match for the condition to pass. Conflicts with `repository_name`. +- `repository_name` (Optional) (Block List, Max: 1) Conflicts with `repository_id`. (see [below for nested schema](#conditions.repository_name)) One of `repository_id` and `repository_name` must be set for the rule to target any repositories. +~> **Note:** For `push` targets, do not include `ref_name` in conditions. Push rulesets operate on file content, not on refs. + #### conditions.ref_name #### -* `exclude` - (Required) (List of String) Array of ref names or patterns to exclude. 
The condition will not pass if any of these patterns match. +- `exclude` - (Required) (List of String) Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match. -* `include` - (Required) (List of String) Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches. +- `include` - (Required) (List of String) Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches. #### conditions.repository_name #### -* `exclude` - (Required) (List of String) Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match. +- `exclude` - (Required) (List of String) Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match. + +- `include` - (Required) (List of String) Array of repository names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all repositories. -* `include` - (Required) (List of String) Array of repository names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all repositories. +- `protected` - (Optional) (Boolean) Whether renaming of target repositories is prevented. Defaults to `false`. ## Attributes Reference The following additional attributes are exported: -* `etag` (String) +- `etag` (String) -* `node_id` (String) GraphQL global node id for use with v4 API. +- `node_id` (String) GraphQL global node id for use with v4 API. -* `ruleset_id` (Number) GitHub ID for the ruleset. +- `ruleset_id` (Number) GitHub ID for the ruleset. ## Import diff --git a/website/docs/r/repository_ruleset.html.markdown b/website/docs/r/repository_ruleset.html.markdown index 61c5c733cb..5c33e19473 100644 --- a/website/docs/r/repository_ruleset.html.markdown +++ b/website/docs/r/repository_ruleset.html.markdown @@ -88,224 +88,246 @@ resource "github_repository_ruleset" "example_push" { ## Argument Reference -* `enforcement` - (Required) (String) Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`. +- `enforcement` - (Required) (String) Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`. -* `name` - (Required) (String) The name of the ruleset. +- `name` - (Required) (String) The name of the ruleset. -* `rules` - (Required) (Block List, Min: 1, Max: 1) Rules within the ruleset. (see [below for nested schema](#rules)) +- `rules` - (Required) (Block List, Min: 1, Max: 1) Rules within the ruleset. (see [below for nested schema](#rules)) -* `target` - (Required) (String) Possible values are `branch`, `tag` and `push`. +- `target` - (Required) (String) Possible values are `branch`, `tag` and `push`. -* `bypass_actors` - (Optional) (Block List) The actors that can bypass the rules in this ruleset. (see [below for nested schema](#bypass_actors)) +- `bypass_actors` - (Optional) (Block List) The actors that can bypass the rules in this ruleset. 
(see [below for nested schema](#bypass_actors)) -* `conditions` - (Optional) (Block List, Max: 1) Parameters for a repository ruleset ref name condition. (see [below for nested schema](#conditions)) +- `conditions` - (Optional) (Block List, Max: 1) Parameters for a repository ruleset condition. For `branch` and `tag` targets, `ref_name` is required. For `push` targets, `ref_name` must NOT be set - conditions are optional for push targets. (see [below for nested schema](#conditions)) -* `repository` - (Required) (String) Name of the repository to apply ruleset to. +- `repository` - (Required) (String) Name of the repository to apply ruleset to. #### Rules #### The `rules` block supports the following: -* `branch_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the branch_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `tag_name_pattern` as it only applied to rulesets with target `branch`. (see [below for nested schema](#rulesbranch_name_pattern)) +~> **Note:** Rules are target-specific. `branch` and `tag` targets support rules like `creation`, `deletion`, `pull_request`, `required_status_checks`, etc. `push` targets only support `file_path_restriction`, `max_file_size`, `max_file_path_length`, and `file_extension_restriction`. Using the wrong rules for a target will result in a validation error. -* `commit_author_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_author_email_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rulescommit_author_email_pattern)) +- `branch_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the branch_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `tag_name_pattern` as it only applied to rulesets with target `branch`. (see [below for nested schema](#rulesbranch_name_pattern)) -* `commit_message_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_message_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rulescommit_message_pattern)) +- `commit_author_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_author_email_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rulescommit_author_email_pattern)) -* `committer_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the committer_email_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rulescommitter_email_pattern)) +- `commit_message_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_message_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. 
(see [below for nested schema](#rulescommit_message_pattern)) -* `creation` - (Optional) (Boolean) Only allow users with bypass permission to create matching refs. +- `committer_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the committer_email_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. (see [below for nested schema](#rulescommitter_email_pattern)) -* `deletion` - (Optional) (Boolean) Only allow users with bypass permissions to delete matching refs. +- `creation` - (Optional) (Boolean) Only allow users with bypass permission to create matching refs. -* `non_fast_forward` - (Optional) (Boolean) Prevent users with push access from force pushing to branches. +- `deletion` - (Optional) (Boolean) Only allow users with bypass permissions to delete matching refs. -* `merge_queue` - (Optional) (Block List, Max: 1) Merges must be performed via a merge queue. (see [below for nested schema](#rules.merge_queue)) +- `non_fast_forward` - (Optional) (Boolean) Prevent users with push access from force pushing to branches. -* `pull_request` - (Optional) (Block List, Max: 1) Require all commits be made to a non-target branch and submitted via a pull request before they can be merged. (see [below for nested schema](#rulespull_request)) +- `merge_queue` - (Optional) (Block List, Max: 1) Merges must be performed via a merge queue. (see [below for nested schema](#rules.merge_queue)) -* `required_deployments` - (Optional) (Block List, Max: 1) Choose which environments must be successfully deployed to before branches can be merged into a branch that matches this rule. (see [below for nested schema](#rulesrequired_deployments)) +- `pull_request` - (Optional) (Block List, Max: 1) Require all commits be made to a non-target branch and submitted via a pull request before they can be merged. (see [below for nested schema](#rulespull_request)) -* `required_linear_history` - (Optional) (Boolean) Prevent merge commits from being pushed to matching branches. +- `required_deployments` - (Optional) (Block List, Max: 1) Choose which environments must be successfully deployed to before branches can be merged into a branch that matches this rule. (see [below for nested schema](#rulesrequired_deployments)) -* `required_signatures` - (Optional) (Boolean) Commits pushed to matching branches must have verified signatures. +- `required_linear_history` - (Optional) (Boolean) Prevent merge commits from being pushed to matching branches. -* `required_status_checks` - (Optional) (Block List, Max: 1) Choose which status checks must pass before branches can be merged into a branch that matches this rule. When enabled, commits must first be pushed to another branch, then merged or pushed directly to a branch that matches this rule after status checks have passed. (see [below for nested schema](#rulesrequired_status_checks)) +- `required_signatures` - (Optional) (Boolean) Commits pushed to matching branches must have verified signatures. -* `tag_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the tag_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `branch_name_pattern` as it only applied to rulesets with target `tag`. 
+- `required_status_checks` - (Optional) (Block List, Max: 1) Choose which status checks must pass before branches can be merged into a branch that matches this rule. When enabled, commits must first be pushed to another branch, then merged or pushed directly to a branch that matches this rule after status checks have passed. (see [below for nested schema](#rulesrequired_status_checks))
-* `required_code_scanning` - (Optional) (Block List, Max: 1) Define which tools must provide code scanning results before the reference is updated. When configured, code scanning must be enabled and have results for both the commit and the reference being updated. Multiple code scanning tools can be specified. (see [below for nested schema](#rulesrequired_code_scanning))
+- `tag_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the tag_name_pattern rule. This rule only applies to repositories within an enterprise; it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `branch_name_pattern` as it only applies to rulesets with target `tag`. (see [below for nested schema](#rulestag_name_pattern))
-* `file_path_restriction` - (Optional) (Block List, Max 1) Parameters to be used for the file_path_restriction rule. When enabled restricts access to files within the repository. (See [below for nested schema](#rules.file_path_restriction))
+- `required_code_scanning` - (Optional) (Block List, Max: 1) Define which tools must provide code scanning results before the reference is updated. When configured, code scanning must be enabled and have results for both the commit and the reference being updated. Multiple code scanning tools can be specified. (see [below for nested schema](#rulesrequired_code_scanning))
-* `max_file_size` - (Optional) (Block List, Max 1) Parameters to be used for the max_file_size rule. When enabled restricts the maximum size of a file that can be pushed to the repository. (See [below for nested schema](#rules.max_file_size))
+- `file_path_restriction` - (Optional) (Block List, Max: 1) Parameters to be used for the file_path_restriction rule. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.file_path_restriction))
-* `max_file_path_length` - (Optional) (Block List, Max: 1) Prevent commits that include file paths that exceed a specified character limit from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.max_file_path_length))
+- `max_file_size` - (Optional) (Block List, Max: 1) Parameters to be used for the max_file_size rule. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.max_file_size))
-* `file_extension_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include files with specified file extensions from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.file_extension_restriction))
-* `update` - (Optional) (Boolean) Only allow users with bypass permission to update matching refs.
+- `max_file_path_length` - (Optional) (Block List, Max: 1) Prevent commits that include file paths that exceed a specified character limit from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.max_file_path_length))
-* `update_allows_fetch_and_merge` - (Optional) (Boolean) Branch can pull changes from its upstream repository. This is only applicable to forked repositories. Requires `update` to be set to `true`. Note: behaviour is affected by a known bug on the GitHub side which may cause issues when using this parameter.
+- `file_extension_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include files with specified file extensions from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rules.file_extension_restriction))
+- `update` - (Optional) (Boolean) Only allow users with bypass permission to update matching refs.
+
+- `update_allows_fetch_and_merge` - (Optional) (Boolean) Branch can pull changes from its upstream repository. This is only applicable to forked repositories. Requires `update` to be set to `true`. Note: behaviour is affected by a known bug on the GitHub side which may cause issues when using this parameter.
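+
+For illustration, a minimal push-target ruleset that uses only push rules could look like the following sketch (the resource name, repository reference, and limits are placeholders; the snippet is untested):
+
+```hcl
+resource "github_repository_ruleset" "push_content" {
+  name        = "push-content-rules"
+  repository  = github_repository.example.name
+  target      = "push"
+  enforcement = "active"
+
+  # Push rulesets operate on file content, so no ref_name condition is set.
+  rules {
+    max_file_size {
+      max_file_size = 10 # MB
+    }
+
+    max_file_path_length {
+      max_file_path_length = 255
+    }
+  }
+}
+```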
#### rules.branch_name_pattern ####
-* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
+- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
-* `pattern` - (Required) (String) The pattern to match with.
+- `pattern` - (Required) (String) The pattern to match with.
-* `name` - (Optional) (String) How this rule will appear to users.
+- `name` - (Optional) (String) How this rule will appear to users.
-* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
+- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
#### rules.commit_author_email_pattern ####
-* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
+- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
-* `pattern` - (Required) (String) The pattern to match with.
+- `pattern` - (Required) (String) The pattern to match with.
-* `name` - (Optional) (String) How this rule will appear to users.
+- `name` - (Optional) (String) How this rule will appear to users.
-* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
+- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
#### rules.commit_message_pattern ####
-* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
+- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
-* `pattern` - (Required) (String) The pattern to match with.
+- `pattern` - (Required) (String) The pattern to match with.
-* `name` - (Optional) (String) How this rule will appear to users.
+- `name` - (Optional) (String) How this rule will appear to users.
-* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
+- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
#### rules.committer_email_pattern ####
-* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
+- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
-* `pattern` - (Required) (String) The pattern to match with.
+- `pattern` - (Required) (String) The pattern to match with.
-* `name` - (Optional) (String) How this rule will appear to users.
+- `name` - (Optional) (String) How this rule will appear to users.
-* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
+- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
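+
+As a sketch of how the pattern parameters above fit together, a `rules` block (inside a `branch`-target `github_repository_ruleset` resource) that enforces a commit message prefix might look like this; the pattern and display name are placeholders:
+
+```hcl
+rules {
+  commit_message_pattern {
+    name     = "Commits must reference a ticket"
+    operator = "starts_with"
+    pattern  = "TICKET-"
+    negate   = false
+  }
+}
+```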
#### rules.merge_queue ####
-* `check_response_timeout_minutes` - (Required) (Number)Maximum time for a required status check to report a conclusion. After this much time has elapsed, checks that have not reported a conclusion will be assumed to have failed. Defaults to `60`.
+- `check_response_timeout_minutes` - (Optional) (Number) Maximum time for a required status check to report a conclusion. After this much time has elapsed, checks that have not reported a conclusion will be assumed to have failed. Defaults to `60`.
-* `grouping_strategy` - (Required) (String)When set to ALLGREEN, the merge commit created by merge queue for each PR in the group must pass all required checks to merge. When set to HEADGREEN, only the commit at the head of the merge group, i.e. the commit containing changes from all of the PRs in the group, must pass its required checks to merge. Can be one of: ALLGREEN, HEADGREEN. Defaults to `ALLGREEN`.
+- `grouping_strategy` - (Optional) (String) When set to `ALLGREEN`, the merge commit created by merge queue for each PR in the group must pass all required checks to merge. When set to `HEADGREEN`, only the commit at the head of the merge group, i.e. the commit containing changes from all of the PRs in the group, must pass its required checks to merge. Can be one of: `ALLGREEN`, `HEADGREEN`. Defaults to `ALLGREEN`.
-* `max_entries_to_build` - (Required) (Number) Limit the number of queued pull requests requesting checks and workflow runs at the same time. Defaults to `5`.
+- `max_entries_to_build` - (Optional) (Number) Limit the number of queued pull requests requesting checks and workflow runs at the same time. Defaults to `5`.
-* `max_entries_to_merge` - (Required) (Number) Limit the number of queued pull requests that will be merged together in a group. Defaults to `5`.
+- `max_entries_to_merge` - (Optional) (Number) Limit the number of queued pull requests that will be merged together in a group. Defaults to `5`.
-* `merge_method` - (Required) (String) Method to use when merging changes from queued pull requests. Can be one of: MERGE, SQUASH, REBASE. Defaults to `MERGE`.
+- `merge_method` - (Optional) (String) Method to use when merging changes from queued pull requests. Can be one of: `MERGE`, `SQUASH`, `REBASE`. Defaults to `MERGE`.
-* `min_entries_to_merge` - (Required) (Number) The minimum number of PRs that will be merged together in a group. Defaults to `1`.
+- `min_entries_to_merge` - (Optional) (Number) The minimum number of PRs that will be merged together in a group. Defaults to `1`.
-* `min_entries_to_merge_wait_minutes` - (Required) (Number) The time merge queue should wait after the first PR is added to the queue for the minimum group size to be met. After this time has elapsed, the minimum group size will be ignored and a smaller group will be merged. Defaults to `5`.
+- `min_entries_to_merge_wait_minutes` - (Optional) (Number) The time merge queue should wait after the first PR is added to the queue for the minimum group size to be met. After this time has elapsed, the minimum group size will be ignored and a smaller group will be merged. Defaults to `5`.
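+
+A hypothetical `merge_queue` configuration using the parameters above (values shown are the documented defaults, placed inside the `rules` block of a `branch`-target ruleset):
+
+```hcl
+rules {
+  merge_queue {
+    grouping_strategy                 = "ALLGREEN"
+    merge_method                      = "MERGE"
+    check_response_timeout_minutes    = 60
+    max_entries_to_build              = 5
+    max_entries_to_merge              = 5
+    min_entries_to_merge              = 1
+    min_entries_to_merge_wait_minutes = 5
+  }
+}
+```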
#### rules.pull_request ####
-* `dismiss_stale_reviews_on_push` - (Optional) (Boolean) New, reviewable commits pushed will dismiss previous pull request review approvals. Defaults to `false`.
+- `dismiss_stale_reviews_on_push` - (Optional) (Boolean) New, reviewable commits pushed will dismiss previous pull request review approvals. Defaults to `false`.
+
+- `require_code_owner_review` - (Optional) (Boolean) Require an approving review in pull requests that modify files that have a designated code owner. Defaults to `false`.
+
+- `require_last_push_approval` - (Optional) (Boolean) Whether the most recent reviewable push must be approved by someone other than the person who pushed it. Defaults to `false`.
+
+- `required_approving_review_count` - (Optional) (Number) The number of approving reviews that are required before a pull request can be merged. Defaults to `0`.
+
+- `required_review_thread_resolution` - (Optional) (Boolean) All conversations on code must be resolved before a pull request can be merged. Defaults to `false`.
+
+- `allowed_merge_methods` - (Required) (List of String, Min: 1) Array of merge methods to be allowed. Allowed values include `merge`, `squash`, and `rebase`. At least one must be enabled.
-* `require_code_owner_review` - (Optional) (Boolean) Require an approving review in pull requests that modify files that have a designated code owner. Defaults to `false`.
+- `required_reviewers` - (Optional) (Block List) Require specific reviewers to approve pull requests. Note: This feature is in beta. (see [below for nested schema](#rules.pull_request.required_reviewers))
-* `require_last_push_approval` - (Optional) (Boolean) Whether the most recent reviewable push must be approved by someone other than the person who pushed it. Defaults to `false`.
+#### rules.pull_request.required_reviewers ####
-* `required_approving_review_count` - (Optional) (Number) The number of approving reviews that are required before a pull request can be merged. Defaults to `0`.
+- `reviewer` - (Required) (Block List, Max: 1) The reviewer that must review matching files. (see [below for nested schema](#rules.pull_request.required_reviewers.reviewer))
-* `required_review_thread_resolution` - (Optional) (Boolean) All conversations on code must be resolved before a pull request can be merged. Defaults to `false`.
+- `file_patterns` - (Required) (List of String) File patterns (fnmatch syntax) that this reviewer must approve.
+
+- `minimum_approvals` - (Required) (Number) Minimum number of approvals required from this reviewer. Set to `0` to make approval optional.
+
+#### rules.pull_request.required_reviewers.reviewer ####
+
+- `id` - (Required) (Number) The ID of the reviewer (Team ID).
+
+- `type` - (Required) (String) The type of reviewer. Currently only `Team` is supported.
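+
+A sketch of a `pull_request` rule that also uses the beta `required_reviewers` block described above; the team ID and file patterns are placeholders:
+
+```hcl
+rules {
+  pull_request {
+    required_approving_review_count   = 2
+    require_code_owner_review         = true
+    dismiss_stale_reviews_on_push     = true
+    required_review_thread_resolution = true
+    allowed_merge_methods             = ["merge", "squash"]
+
+    required_reviewers {
+      file_patterns     = ["docs/**"]
+      minimum_approvals = 1
+
+      reviewer {
+        id   = 1234567 # placeholder team ID
+        type = "Team"
+      }
+    }
+  }
+}
+```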
#### rules.required_deployments ####
-* `required_deployment_environments` - (Required) (List of String) The environments that must be successfully deployed to before branches can be merged.
+- `required_deployment_environments` - (Required) (List of String) The environments that must be successfully deployed to before branches can be merged.
#### rules.required_status_checks ####
-* `required_check` - (Required) (Block Set, Min: 1) Status checks that are required. Several can be defined. (see [below for nested schema](#rulesrequired_status_checksrequired_check))
+- `required_check` - (Required) (Block Set, Min: 1) Status checks that are required. Several can be defined. (see [below for nested schema](#rulesrequired_status_checksrequired_check))
-* `strict_required_status_checks_policy` - (Optional) (Boolean) Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. Defaults to `false`.
+- `strict_required_status_checks_policy` - (Optional) (Boolean) Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. Defaults to `false`.
-* `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`.
+- `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`.
#### rules.required_status_checks.required_check ####
-* `context` - (Required) (String) The status check context name that must be present on the commit.
+- `context` - (Required) (String) The status check context name that must be present on the commit.
-* `integration_id` - (Optional) (Number) The optional integration ID that this status check must originate from. It's a GitHub App ID, which can be obtained by following instructions from the [Get an App API docs](https://docs.github.com/en/rest/apps/apps?apiVersion=2022-11-28#get-an-app).
+- `integration_id` - (Optional) (Number) The optional integration ID that this status check must originate from. It's a GitHub App ID, which can be obtained by following instructions from the [Get an App API docs](https://docs.github.com/en/rest/apps/apps?apiVersion=2022-11-28#get-an-app).
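+
+For example, a `required_status_checks` rule requiring two checks, one of them scoped to a specific GitHub App (the context names and App ID are placeholders):
+
+```hcl
+rules {
+  required_status_checks {
+    strict_required_status_checks_policy = true
+
+    required_check {
+      context = "ci/build"
+    }
+
+    required_check {
+      context        = "ci/lint"
+      integration_id = 12345 # placeholder GitHub App ID
+    }
+  }
+}
+```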
#### rules.tag_name_pattern ####
-* `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
+- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.
-* `pattern` - (Required) (String) The pattern to match with.
+- `pattern` - (Required) (String) The pattern to match with.
-* `name` - (Optional) (String) How this rule will appear to users.
+- `name` - (Optional) (String) How this rule will appear to users.
-* `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
+- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches.
#### rules.required_code_scanning ####
-* `required_code_scanning_tool` - (Required) (Block Set, Min: 1) Actions code scanning tools that are required. Multiple can be defined. (see [below for nested schema](#rulesrequired_code_scanningrequired_code_scanning_tool))
+- `required_code_scanning_tool` - (Required) (Block Set, Min: 1) Actions code scanning tools that are required. Multiple can be defined. (see [below for nested schema](#rulesrequired_code_scanningrequired_code_scanning_tool))
#### rules.required_code_scanning.required_code_scanning_tool ####
-* `alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise alerts block a reference update. Can be one of: `none`, `errors`, `errors_and_warnings`, `all`.
+- `alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise alerts block a reference update. Can be one of: `none`, `errors`, `errors_and_warnings`, `all`.
-* `security_alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise security alerts block a reference update. Can be one of: `none`, `critical`, `high_or_higher`, `medium_or_higher`, `all`.
+- `security_alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise security alerts block a reference update. Can be one of: `none`, `critical`, `high_or_higher`, `medium_or_higher`, `all`.
-* `tool` - (Required) (String) The name of a code scanning tool.
+- `tool` - (Required) (String) The name of a code scanning tool.
#### rules.file_path_restriction ####
-* `restricted_file_paths` - (Required) (Block Set, Min: 1) The file paths that are restricted from being pushed to the commit graph.
+- `restricted_file_paths` - (Required) (Block Set, Min: 1) The file paths that are restricted from being pushed to the commit graph.
#### rules.max_file_size ####
-* `max_file_size` - (Required) (Integer) The maximum allowed size, in megabytes (MB), of a file. Valid range is 1-100 MB.
+- `max_file_size` - (Required) (Integer) The maximum allowed size, in megabytes (MB), of a file. Valid range is 1-100 MB.
#### rules.max_file_path_length ####
-* `max_file_path_length` - (Required) (Integer) The maximum number of characters allowed in file paths.
+- `max_file_path_length` - (Required) (Integer) The maximum number of characters allowed in file paths.
#### rules.file_extension_restriction ####
-* `restricted_file_extensions` - (Required) (Block Set, Min: 1) The file extensions that are restricted from being pushed to the commit graph.
+- `restricted_file_extensions` - (Required) (Block Set, Min: 1) The file extensions that are restricted from being pushed to the commit graph.
#### bypass_actors ####
-* `actor_id` - (Number) The ID of the actor that can bypass a ruleset. If `actor_type` is `Integration`, `actor_id` is a GitHub App ID. App ID can be obtained by following instructions from the [Get an App API docs](https://docs.github.com/en/rest/apps/apps?apiVersion=2022-11-28#get-an-app)
+- `actor_id` - (Optional) (Number) The ID of the actor that can bypass a ruleset. If `actor_type` is `Integration`, `actor_id` is a GitHub App ID. App ID can be obtained by following instructions from the [Get an App API docs](https://docs.github.com/en/rest/apps/apps?apiVersion=2022-11-28#get-an-app). Some actor types such as `DeployKey` do not have an ID.
-* `actor_type` (String) The type of actor that can bypass a ruleset. Can be one of: `RepositoryRole`, `Team`, `Integration`, `OrganizationAdmin`, `DeployKey`.
+- `actor_type` (String) The type of actor that can bypass a ruleset. Can be one of: `RepositoryRole`, `Team`, `Integration`, `OrganizationAdmin`, `DeployKey`.
-* `bypass_mode` - (Optional) (String) When the specified actor can bypass the ruleset. pull_request means that an actor can only bypass rules on pull requests. Can be one of: `always`, `pull_request`, `exempt`.
+- `bypass_mode` - (Optional) (String) When the specified actor can bypass the ruleset. `pull_request` means that an actor can only bypass rules on pull requests. Can be one of: `always`, `pull_request`, `exempt`.
~> Note: at the time of writing this, the following actor types correspond to the following actor IDs:
-* `OrganizationAdmin` -> `1`
-* `RepositoryRole` (This is the actor type, the following are the base repository roles and their associated IDs.)
-  * `maintain` -> `2`
-  * `write` -> `4`
-  * `admin` -> `5`
+- `OrganizationAdmin` -> `1`
+- `RepositoryRole` (This is the actor type, the following are the base repository roles and their associated IDs.)
+  - `maintain` -> `2`
+  - `write` -> `4`
+  - `admin` -> `5`
#### conditions ####
-* `ref_name` - (Required) (Block List, Min: 1, Max: 1) (see [below for nested schema](#conditions.ref_name))
+- `ref_name` - (Optional) (Block List, Max: 1) Required for `branch` and `tag` targets. Must NOT be set for `push` targets. (see [below for nested schema](#conditions.ref_name))
+
+~> **Note:** For `push` targets, do not include `ref_name` in conditions. Push rulesets operate on file content, not on refs. The `conditions` block is optional for push targets.
#### conditions.ref_name ####
-* `exclude` - (Required) (List of String) Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.
+- `exclude` - (Required) (List of String) Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.
-* `include` - (Required) (List of String) Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches.
+- `include` - (Required) (List of String) Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches.
## Attributes Reference
The following additional attributes are exported:
-* `etag` (String)
+- `etag` (String)
-* `node_id` (String) GraphQL global node id for use with v4 API.
+- `node_id` (String) GraphQL global node id for use with v4 API.
-* `ruleset_id` (Number) GitHub ID for the ruleset.
+- `ruleset_id` (Number) GitHub ID for the ruleset.
## Import