[Perforce] Store all rules in one column and apply as last rule takes precedence (#41785)

Co-authored-by: Alex Ostrikov <alex.ostrikov@sourcegraph.com>
This commit is contained in:
Petri-Johan Last 2022-10-03 15:58:34 +02:00 committed by GitHub
parent ccdda648cb
commit dbbb0e0ed0
24 changed files with 508 additions and 273 deletions

View File

@ -25,6 +25,7 @@ All notable changes to Sourcegraph are documented in this file.
### Changed
- Git server access logs are now compliant with the audit logging format. Breaking change: The 'actor' field is now nested under the 'audit' field. [#41865](https://github.com/sourcegraph/sourcegraph/pull/41865)
- All Perforce rules are now stored together in one column and evaluated on a "last rule takes precedence" basis. [#41785](https://github.com/sourcegraph/sourcegraph/pull/41785)
### Fixed

View File

@ -56,8 +56,9 @@ type SubRepoPermsArgs struct {
Repository graphql.ID
UserPermissions []struct {
BindID string
PathIncludes []string
PathExcludes []string
PathIncludes *[]string
PathExcludes *[]string
Paths *[]string
}
}

View File

@ -229,13 +229,22 @@ input UserSubRepoPermission {
"""
bindID: String!
"""
DEPRECATED
An array of paths that the user is allowed to access, in glob format.
"""
pathIncludes: [String!]!
pathIncludes: [String!] @deprecated(reason: "use paths instead.")
"""
DEPRECATED
An array of paths that the user is not allowed to access, in glob format.
"""
pathExcludes: [String!]!
pathExcludes: [String!] @deprecated(reason: "use paths instead.")
"""
An array of paths in glob format. Paths starting with a minus (-)
(i.e. "-/dev/private") prevent access, otherwise paths grant access.
The last applicable path takes precedence.
When paths is set, pathIncludes and pathExcludes are ignored.
"""
paths: [String!]
}
"""

View File

@ -248,15 +248,51 @@ func (r *Resolver) SetSubRepositoryPermissionsForUsers(ctx context.Context, args
return nil, err
}
for _, perm := range args.UserPermissions {
if (perm.PathIncludes == nil || perm.PathExcludes == nil) && perm.Paths == nil {
return nil, errors.New("either both pathIncludes and pathExcludes needs to be set, or paths needs to be set")
}
}
for _, perm := range args.UserPermissions {
userID, ok := mapping[perm.BindID]
if !ok {
return nil, errors.Errorf("user %q not found", perm.BindID)
}
var paths []string
if perm.Paths == nil {
paths = make([]string, 0, len(*perm.PathIncludes)+len(*perm.PathExcludes))
for _, include := range *perm.PathIncludes {
if !strings.HasPrefix(include, "/") { // ensure leading slash
include = "/" + include
}
paths = append(paths, include)
}
for _, exclude := range *perm.PathExcludes {
if !strings.HasPrefix(exclude, "/") { // ensure leading slash
exclude = "/" + exclude
}
paths = append(paths, "-"+exclude) // excludes start with a minus (-)
}
} else {
paths = make([]string, 0, len(*perm.Paths))
for _, path := range *perm.Paths {
if strings.HasPrefix(path, "-") {
if !strings.HasPrefix(path, "-/") {
path = "-/" + strings.TrimPrefix(path, "-")
}
} else {
if !strings.HasPrefix(path, "/") {
path = "/" + path
}
}
paths = append(paths, path)
}
}
if err := db.SubRepoPerms().Upsert(ctx, userID, repoID, authz.SubRepoPermissions{
PathIncludes: perm.PathIncludes,
PathExcludes: perm.PathExcludes,
Paths: paths,
}); err != nil {
return nil, errors.Wrap(err, "upserting sub-repo permissions")
}
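
As a worked example of the conversion above (the values are illustrative, not taken from this commit):

```go
// Legacy arguments are rewritten into the unified format:
//   pathIncludes: ["*"], pathExcludes: ["*_test.go"]  =>  paths: ["/*", "-/*_test.go"]
// A caller-supplied paths value is only normalised to carry the expected prefixes:
//   paths: ["-dev/*", "base/**"]                      =>  paths: ["-/dev/*", "/base/**"]
```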

View File

@ -1350,34 +1350,99 @@ func TestResolver_SetSubRepositoryPermissionsForUsers(t *testing.T) {
ctx := actor.WithActor(context.Background(), &actor.Actor{UID: 1})
test := &graphqlbackend.Test{
Context: ctx,
Schema: mustParseGraphQLSchema(t, db),
Query: `
tests := []*graphqlbackend.Test{
{
Context: ctx,
Schema: mustParseGraphQLSchema(t, db),
Query: `
mutation {
setSubRepositoryPermissionsForUsers(
repository: "UmVwb3NpdG9yeTox"
userPermissions: [{bindID: "alice", pathIncludes: ["*"], pathExcludes: ["*_test.go"]}]
userPermissions: [{bindID: "alice", pathIncludes: ["/*"], pathExcludes: ["/*_test.go"]}]
) {
alwaysNil
}
}
`,
ExpectedResult: `
ExpectedResult: `
{
"setSubRepositoryPermissionsForUsers": {
"alwaysNil": null
}
}
`,
},
{
Context: ctx,
Schema: mustParseGraphQLSchema(t, db),
Query: `
mutation {
setSubRepositoryPermissionsForUsers(
repository: "UmVwb3NpdG9yeTox"
userPermissions: [{bindID: "alice", pathIncludes: ["/*"], pathExcludes: ["/*_test.go"], paths: ["-/*_test.go", "/*"]}]
) {
alwaysNil
}
}
`,
ExpectedResult: `
{
"setSubRepositoryPermissionsForUsers": {
"alwaysNil": null
}
}
`,
},
{
Context: ctx,
Schema: mustParseGraphQLSchema(t, db),
Query: `
mutation {
setSubRepositoryPermissionsForUsers(
repository: "UmVwb3NpdG9yeTox"
userPermissions: [{bindID: "alice", paths: ["-/*_test.go", "/*"]}]
) {
alwaysNil
}
}
`,
ExpectedResult: `
{
"setSubRepositoryPermissionsForUsers": {
"alwaysNil": null
}
}
`,
},
{
Context: ctx,
Schema: mustParseGraphQLSchema(t, db),
Query: `
mutation {
setSubRepositoryPermissionsForUsers(
repository: "UmVwb3NpdG9yeTox"
userPermissions: [{bindID: "alice", pathIncludes: ["/*_test.go"]}]
) {
alwaysNil
}
}
`,
ExpectedErrors: []*gqlerrors.QueryError{
{
Message: "either both pathIncludes and pathExcludes needs to be set, or paths needs to be set",
Path: []any{string("setSubRepositoryPermissionsForUsers")},
},
},
ExpectedResult: "null",
},
}
graphqlbackend.RunTests(t, []*graphqlbackend.Test{test})
graphqlbackend.RunTests(t, tests)
// Assert that we actually tried to store perms
h := subReposStore.UpsertFunc.History()
if len(h) != 1 {
t.Fatalf("Wanted 1 call, got %d", len(h))
if len(h) != 3 {
t.Fatalf("Wanted 3 calls, got %d", len(h))
}
})
}

View File

@ -731,12 +731,10 @@ func TestPermsSyncer_syncUserPerms_subRepoPermissions(t *testing.T) {
SubRepoPermissions: map[extsvc.RepoID]*authz.SubRepoPermissions{
"abc": {
PathIncludes: []string{"include1", "include2"},
PathExcludes: []string{"exclude1", "exclude2"},
Paths: []string{"/include1", "/include2", "-/exclude1", "-/exclude2"},
},
"def": {
PathIncludes: []string{"include1", "include2"},
PathExcludes: []string{"exclude1", "exclude2"},
Paths: []string{"/include1", "/include2", "-/exclude1", "-/exclude2"},
},
},
}, nil

View File

@ -55,11 +55,8 @@ func run(logger log.Logger, depot string, input io.Reader) {
}
for depot, subRepo := range perms.SubRepoPermissions {
logger.Debug("Sub repo permissions", log.String("depot", string(depot)))
for _, include := range subRepo.PathIncludes {
logger.Debug("Include rule", log.String("rule", include))
}
for _, exclude := range subRepo.PathExcludes {
logger.Debug("Include rule", log.String("rule", exclude))
for _, path := range subRepo.Paths {
logger.Debug("Include rule", log.String("rule", path))
}
}
}

View File

@ -26,7 +26,7 @@ func TestPerformDebugScan(t *testing.T) {
logged := exporter()
// For now we'll just check that the count as well as first and last lines are
// what we expect
assert.Len(t, logged, 449)
assert.Equal(t, "Converted depot to glob", logged[0].Message)
assert.Len(t, logged, 448)
assert.Equal(t, "Converted depot to glob", logged[0].Message) // fails without error
assert.Equal(t, "Include rule", logged[len(logged)-1].Message)
}

View File

@ -227,6 +227,43 @@ open user alice * -//Sourcegraph/*/Handbook/... ## sub-matc
},
},
},
{
name: "include and exclude, then include again",
response: `
read user alice * //Sourcegraph/Security/...
read user alice * //Sourcegraph/Engineering/...
owner user alice * //Sourcegraph/Engineering/Backend/...
open user alice * //Sourcegraph/Engineering/Frontend/...
review user alice * //Sourcegraph/Handbook/...
open user alice * //Sourcegraph/Engineering/.../Frontend/...
open user alice * //Sourcegraph/.../Handbook/... ## wildcard A
list user alice * -//Sourcegraph/Security/... ## "list" can revoke read access
=read user alice * -//Sourcegraph/Engineering/Frontend/... ## exact match of a previous include
open user alice * -//Sourcegraph/Engineering/Backend/Credentials/... ## sub-match of a previous include
open user alice * -//Sourcegraph/Engineering/*/Frontend/Folder/... ## sub-match of a previous include
open user alice * -//Sourcegraph/*/Handbook/... ## sub-match of wildcard A include
read user alice * //Sourcegraph/Security/... ## give access to alice again after revoking
`,
wantPerms: &authz.ExternalUserPermissions{
IncludeContains: []extsvc.RepoID{
"//Sourcegraph/Engineering/%",
"//Sourcegraph/Engineering/Backend/%",
"//Sourcegraph/Engineering/Frontend/%",
"//Sourcegraph/Handbook/%",
"//Sourcegraph/Engineering/%/Frontend/%",
"//Sourcegraph/%/Handbook/%",
"//Sourcegraph/Security/%",
},
ExcludeContains: []extsvc.RepoID{
"//Sourcegraph/Engineering/Frontend/%",
"//Sourcegraph/Engineering/Backend/Credentials/%",
"//Sourcegraph/Engineering/[^/]+/Frontend/Folder/%",
"//Sourcegraph/[^/]+/Handbook/%",
},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
@ -290,11 +327,9 @@ read user alice * -//Sourcegraph/Security/...
Exacts: []extsvc.RepoID{"//Sourcegraph/"},
SubRepoPermissions: map[extsvc.RepoID]*authz.SubRepoPermissions{
"//Sourcegraph/": {
PathIncludes: []string{
mustGlobPattern(t, "Engineering/..."),
},
PathExcludes: []string{
mustGlobPattern(t, "Security/..."),
Paths: []string{
mustGlobPattern(t, "/Engineering/..."),
mustGlobPattern(t, "-/Security/..."),
},
},
},

View File

@ -242,8 +242,8 @@ func scanProtects(logger log.Logger, rc io.Reader, s *protectsScanner) error {
match: fields[4],
}
if strings.HasPrefix(parsedLine.match, "-") {
parsedLine.isExclusion = true // is an exclusion
parsedLine.match = parsedLine.match[1:] // trim leading -
parsedLine.isExclusion = true // is an exclusion
parsedLine.match = strings.TrimPrefix(parsedLine.match, "-") // trim leading -
}
// We only care about read access. If the permission doesn't change read access,
@ -390,67 +390,32 @@ func fullRepoPermsScanner(logger log.Logger, perms *authz.ExternalUserPermission
}
lineLogger.Debug("Relevant depots", log.Strings("depots", depotStrings))
// Handle inclusions
if !line.isExclusion {
// Grant access to specified paths
for _, depot := range depots {
srp := getSubRepoPerms(depot)
newIncludes := convertRulesForWildcardDepotMatch(match, depot, patternsToGlob)
srp.PathIncludes = append(srp.PathIncludes, newIncludes...)
lineLogger.Debug("Adding include rules", log.Strings("rules", newIncludes))
var i int
for _, exclude := range srp.PathExcludes {
// Perforce ACLs can have conflicting rules and the later one wins, so
// if we get a match here we drop the existing rule.
originalExclude, exists := patternsToGlob[exclude]
if !exists {
i++
continue
}
checkWithDepotAdded := !strings.HasPrefix(originalExclude.pattern, "//") && match.Match(string(depot)+originalExclude.pattern)
if originalExclude.Match(match.original) || checkWithDepotAdded {
lineLogger.Debug("Removing conflicting exclude rule", log.String("rule", originalExclude.pattern))
srp.PathExcludes = append(srp.PathExcludes[:i], srp.PathExcludes[i+1:]...)
} else {
i++
}
}
}
return nil
}
// Apply rules to specified paths
for _, depot := range depots {
srp := getSubRepoPerms(depot)
// Special case: exclude entire depot
// Special case: match entire depot overrides all previous rules
if strings.TrimPrefix(match.original, string(depot)) == perforceWildcardMatchAll {
lineLogger.Debug("Exclude entire depot, removing all include rules")
srp.PathIncludes = nil
if line.isExclusion {
lineLogger.Debug("Exclude entire depot, removing all previous rules")
} else {
lineLogger.Debug("Include entire depot, removing all previous rules")
}
srp.Paths = nil
}
newExcludes := convertRulesForWildcardDepotMatch(match, depot, patternsToGlob)
srp.PathExcludes = append(srp.PathExcludes, newExcludes...)
lineLogger.Debug("Adding exclude rules", log.Strings("rules", newExcludes))
var i int
for _, include := range srp.PathIncludes {
// Perforce ACLs can have conflicting rules and the later one wins, so
// if we get a match here we drop the existing rule.
originalInclude, exists := patternsToGlob[include]
if !exists {
i++
continue
}
checkWithDepotAdded := !strings.HasPrefix(originalInclude.pattern, "//") && match.Match(string(depot)+originalInclude.pattern)
if match.Match(originalInclude.original) || checkWithDepotAdded {
lineLogger.Debug("Removing conflicting include rule", log.String("rule", originalInclude.pattern))
srp.PathIncludes = append(srp.PathIncludes[:i], srp.PathIncludes[i+1:]...)
} else {
i++
newPaths := convertRulesForWildcardDepotMatch(match, depot, patternsToGlob)
if line.isExclusion {
for i, path := range newPaths {
newPaths[i] = "-" + path
}
}
srp.Paths = append(srp.Paths, newPaths...)
if line.isExclusion {
lineLogger.Debug("Adding exclude rules", log.Strings("rules", newPaths))
} else {
lineLogger.Debug("Adding include rules", log.Strings("rules", newPaths))
}
}
return nil
@ -461,7 +426,17 @@ func fullRepoPermsScanner(logger log.Logger, perms *authz.ExternalUserPermission
srp, exists := perms.SubRepoPermissions[depot]
if !exists {
continue
} else if len(srp.PathIncludes) == 0 {
}
onlyExclusions := true
for _, path := range srp.Paths {
if !strings.HasPrefix(path, "-") {
onlyExclusions = false
break
}
}
if onlyExclusions {
// Depots with no inclusions can just be dropped
delete(perms.SubRepoPermissions, depot)
continue
@ -472,13 +447,19 @@ func fullRepoPermsScanner(logger log.Logger, perms *authz.ExternalUserPermission
// repositoryPathPattern has been used. We also need to remove any `//` prefixes
// which are included in all Helix server rules.
depotString := string(depot)
for i := range srp.PathIncludes {
srp.PathIncludes[i] = strings.TrimPrefix(srp.PathIncludes[i], depotString)
srp.PathIncludes[i] = strings.TrimPrefix(srp.PathIncludes[i], "//")
}
for i := range srp.PathExcludes {
srp.PathExcludes[i] = strings.TrimPrefix(srp.PathExcludes[i], depotString)
srp.PathExcludes[i] = strings.TrimPrefix(srp.PathExcludes[i], "//")
for i := range srp.Paths {
path := srp.Paths[i]
// Covering exclusion paths
if strings.HasPrefix(path, "-") {
path = strings.TrimPrefix(path, "-")
path = trimDepotNameAndSlashes(path, depotString)
path = "-" + path
} else {
path = trimDepotNameAndSlashes(path, depotString)
}
srp.Paths[i] = path
}
// Add to repos users can access
@ -489,6 +470,16 @@ func fullRepoPermsScanner(logger log.Logger, perms *authz.ExternalUserPermission
}
}
func trimDepotNameAndSlashes(s, depotName string) string {
depotName = strings.TrimSuffix(depotName, "/") // we want to keep the leading slash
s = strings.TrimPrefix(s, depotName)
s = strings.TrimPrefix(s, "//")
if !strings.HasPrefix(s, "/") {
s = "/" + s // make sure path starts with a '/'
}
return s
}
func convertRulesForWildcardDepotMatch(match globMatch, depot extsvc.RepoID, patternsToGlob map[string]globMatch) []string {
logger := log.Scoped("convertRulesForWildcardDepotMatch", "")
if !strings.Contains(match.pattern, "**") && !strings.Contains(match.pattern, "*") {
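
For reference, a worked trace of the new trimDepotNameAndSlashes helper introduced above (the inputs are illustrative):

```go
// trimDepotNameAndSlashes("//depot/main/dir/file.txt", "//depot/main/") == "/dir/file.txt"
// trimDepotNameAndSlashes("dir/file.txt", "//depot/main/")              == "/dir/file.txt"
// The depot prefix and any leading "//" are stripped, and a single leading "/" is guaranteed.
```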

View File

@ -256,41 +256,38 @@ func TestScanFullRepoPermissions(t *testing.T) {
},
SubRepoPermissions: map[extsvc.RepoID]*authz.SubRepoPermissions{
"//depot/main/": {
PathIncludes: []string{
mustGlobPattern(t, "base/..."),
mustGlobPattern(t, "*/stuff/..."),
mustGlobPattern(t, "frontend/.../stuff/*"),
mustGlobPattern(t, "config.yaml"),
mustGlobPattern(t, "subdir/**"),
mustGlobPattern(t, ".../README.md"),
mustGlobPattern(t, "dir.yaml"),
},
PathExcludes: []string{
mustGlobPattern(t, "subdir/remove/"),
mustGlobPattern(t, "subdir/*/also-remove/..."),
mustGlobPattern(t, ".../.secrets.env"),
Paths: []string{
mustGlobPattern(t, "-/..."),
mustGlobPattern(t, "/base/..."),
mustGlobPattern(t, "/*/stuff/..."),
mustGlobPattern(t, "/frontend/.../stuff/*"),
mustGlobPattern(t, "/config.yaml"),
mustGlobPattern(t, "/subdir/**"),
mustGlobPattern(t, "-/subdir/remove/"),
mustGlobPattern(t, "/subdir/some-dir/also-remove/..."),
mustGlobPattern(t, "/subdir/another-dir/also-remove/..."),
mustGlobPattern(t, "-/subdir/*/also-remove/..."),
mustGlobPattern(t, "/.../README.md"),
mustGlobPattern(t, "/dir.yaml"),
mustGlobPattern(t, "-/.../.secrets.env"),
},
},
"//depot/test/": {
PathIncludes: []string{
mustGlobPattern(t, "..."),
mustGlobPattern(t, ".../README.md"),
mustGlobPattern(t, "dir.yaml"),
},
PathExcludes: []string{
mustGlobPattern(t, ".../.secrets.env"),
Paths: []string{
mustGlobPattern(t, "/..."),
mustGlobPattern(t, "/.../README.md"),
mustGlobPattern(t, "/dir.yaml"),
mustGlobPattern(t, "-/.../.secrets.env"),
},
},
"//depot/training/": {
PathIncludes: []string{
mustGlobPattern(t, "..."),
mustGlobPattern(t, ".../README.md"),
mustGlobPattern(t, "dir.yaml"),
},
PathExcludes: []string{
mustGlobPattern(t, "secrets/..."),
mustGlobPattern(t, ".env"),
mustGlobPattern(t, ".../.secrets.env"),
Paths: []string{
mustGlobPattern(t, "/..."),
mustGlobPattern(t, "-/secrets/..."),
mustGlobPattern(t, "-/.env"),
mustGlobPattern(t, "/.../README.md"),
mustGlobPattern(t, "/dir.yaml"),
mustGlobPattern(t, "-/.../.secrets.env"),
},
},
},
@ -337,15 +334,14 @@ func TestScanFullRepoPermissionsWithWildcardMatchingDepot(t *testing.T) {
},
SubRepoPermissions: map[extsvc.RepoID]*authz.SubRepoPermissions{
"//depot/main/base/": {
PathIncludes: []string{
mustGlobPattern(t, "**"),
},
PathExcludes: []string{
mustGlobPattern(t, "**"),
mustGlobPattern(t, "**/base/build/deleteorgs.txt"),
mustGlobPattern(t, "build/deleteorgs.txt"),
mustGlobPattern(t, "**/base/build/**/asdf.txt"),
mustGlobPattern(t, "build/**/asdf.txt"),
Paths: []string{
mustGlobPattern(t, "-/**"),
mustGlobPattern(t, "/**"),
mustGlobPattern(t, "-/**"),
mustGlobPattern(t, "-/**/base/build/deleteorgs.txt"),
mustGlobPattern(t, "-/build/deleteorgs.txt"),
mustGlobPattern(t, "-/**/base/build/**/asdf.txt"),
mustGlobPattern(t, "-/build/**/asdf.txt"),
},
},
},
@ -478,6 +474,49 @@ read group Rome * //depot/dev/...
protectsFile: "testdata/sample-protects-edb.txt",
canReadAll: []string{"db/plpgsql/seed.psql"},
},
{
name: "Leading slash edge cases",
depot: "//depot/",
protects: `
read group Rome * //depot/.../something.java ## Can read all files named 'something.java'
read group Rome * -//depot/dev/prodA/... ## Except files in this directory
`,
cannotReadAny: []string{"dev/prodA/something.java", "dev/prodA/another_dir/something.java", "/dev/prodA/something.java", "/dev/prodA/another_dir/something.java"},
canReadAll: []string{"something.java", "/something.java", "dev/prodB/something.java", "/dev/prodC/something.java"},
},
{
name: "Deny all, grant some",
depot: "//depot/main/",
protects: `
read group Dev1 * -//depot/main/...
read group Dev1 * -//depot/main/.../*.java
read group Dev1 * //depot/main/.../dev/foo.java
`,
canReadAll: []string{"dev/foo.java"},
cannotReadAny: []string{"dev/bar.java"},
},
{
name: "Grant all, deny some",
depot: "//depot/main/",
protects: `
read group Dev1 * //depot/main/...
read group Dev1 * //depot/main/.../*.java
read group Dev1 * -//depot/main/.../dev/foo.java
`,
canReadAll: []string{"dev/bar.java"},
cannotReadAny: []string{"dev/foo.java"},
},
{
name: "Tricky minus names",
depot: "//-depot/-main/",
protects: `
read group Dev1 * //-depot/-main/...
read group Dev1 * //-depot/-main/.../*.java
read group Dev1 * -//-depot/-main/.../dev/foo.java
`,
canReadAll: []string{"dev/bar.java", "/-minus/dev/bar.java"},
cannotReadAny: []string{"dev/foo.java"},
},
} {
t.Run(tc.name, func(t *testing.T) {
logger := logtest.Scoped(t)
@ -536,7 +575,7 @@ read group Rome * //depot/dev/...
} else if ok && tc.noRules {
t.Fatal("expected no rules")
}
checker, err := authz.NewSimpleChecker(api.RepoName(tc.depot), rules.PathIncludes, rules.PathExcludes)
checker, err := authz.NewSimpleChecker(api.RepoName(tc.depot), rules.Paths)
if err != nil {
t.Fatal(err)
}
@ -601,16 +640,15 @@ func TestFullScanWildcardDepotMatching(t *testing.T) {
},
SubRepoPermissions: map[extsvc.RepoID]*authz.SubRepoPermissions{
"//depot/654/deploy/base/": {
PathExcludes: []string{
mustGlobPattern(t, "**/base/build/deleteorgs.txt"),
mustGlobPattern(t, "build/deleteorgs.txt"),
mustGlobPattern(t, "asdf/plsql/base/cCustomSchema*.sql"),
},
PathIncludes: []string{
mustGlobPattern(t, "db/upgrade-scripts/**"),
mustGlobPattern(t, "db/my_db/upgrade-scripts/**"),
mustGlobPattern(t, "asdf/config/my_schema.xml"),
mustGlobPattern(t, "db/plpgsql/**"),
Paths: []string{
mustGlobPattern(t, "-/**"),
mustGlobPattern(t, "-/**/base/build/deleteorgs.txt"),
mustGlobPattern(t, "-/build/deleteorgs.txt"),
mustGlobPattern(t, "-/asdf/plsql/base/cCustomSchema*.sql"),
mustGlobPattern(t, "/db/upgrade-scripts/**"),
mustGlobPattern(t, "/db/my_db/upgrade-scripts/**"),
mustGlobPattern(t, "/asdf/config/my_schema.xml"),
mustGlobPattern(t, "/db/plpgsql/**"),
},
},
},

View File

@ -46,8 +46,7 @@ import (
//
// Paths are relative to the root of the repo.
type SubRepoPermissions struct {
PathIncludes []string
PathExcludes []string
Paths []string
}
// ExternalUserPermissions is a collection of accessible repository/project IDs

View File

@ -43,7 +43,7 @@ type SubRepoPermissionChecker interface {
// If the userID represents an anonymous user, ErrUnauthenticated is returned.
Permissions(ctx context.Context, userID int32, content RepoContent) (Perms, error)
// FilePermissionFunc returns a FilePermissionFunc for userID in repo.
// FilePermissionsFunc returns a FilePermissionFunc for userID in repo.
// This function should only be used during the lifetime of a request. It
// exists to amortize the cost of checking many files in a repo.
//
@ -125,10 +125,44 @@ type cachedRules struct {
timestamp time.Time
}
type path struct {
globPath glob.Glob
exclusion bool
}
type compiledRules struct {
includes []glob.Glob
excludes []glob.Glob
dirIncludes []glob.Glob
paths []path
// parent directories of all included paths so that we can still see
// the paths in file navigation
dirs []glob.Glob
}
// GetPermissionsForPath tries to match a given path to a list of rules.
// Since the last applicable rule is the one that applies, the list is
// traversed in reverse, and the function returns as soon as a match is found.
// If no match is found, None is returned.
func (rules compiledRules) GetPermissionsForPath(path string) Perms {
// We want to match any directories above paths that we include so that we
// can browse down the file hierarchy.
if strings.HasSuffix(path, "/") {
for _, dir := range rules.dirs {
if dir.Match(path) {
return Read
}
}
}
for i := len(rules.paths) - 1; i >= 0; i-- {
if rules.paths[i].globPath.Match(path) {
if rules.paths[i].exclusion {
return None
}
return Read
}
}
// Return None if no rule matches
return None
}
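
A brief illustration of why the separate dirs globs exist (the expected results mirror the directory-navigation tests later in this commit; the walk-through itself is editorial):

```go
// With Paths: ["/foo/bar/thing.txt"]
//   "foo/"              -> Read (ancestor directory, matched via rules.dirs)
//   "foo/bar/"          -> Read (same)
//   "foo/bar/other.txt" -> None (no path rule matches)
```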
// NewSubRepoPermsClient instantiates an instance of authz.SubRepoPermsClient
@ -275,31 +309,14 @@ func (s *SubRepoPermsClient) FilePermissionsFunc(ctx context.Context, userID int
return Read, nil
}
// The current path needs to either be included or NOT excluded and we'll give
// preference to exclusion.
for _, rule := range rules.excludes {
if rule.Match(path) {
return None, nil
}
}
for _, rule := range rules.includes {
if rule.Match(path) {
return Read, nil
}
// Prefix path with "/", otherwise suffix rules like "**/file.txt" won't match
if !strings.HasPrefix(path, "/") {
path = "/" + path
}
// We also want to match any directories above paths that we include so that we
// can browse down the file hierarchy.
if strings.HasSuffix(path, "/") {
for _, rule := range rules.dirIncludes {
if rule.Match(path) {
return Read, nil
}
}
}
// Return None if no rule matches to be safe
return None, nil
// Iterate through all rules for the current path, and the final match takes
// preference.
return rules.GetPermissionsForPath(path), nil
}, nil
}
@ -333,15 +350,23 @@ func (s *SubRepoPermsClient) getCompiledRules(ctx context.Context, userID int32)
timestamp: time.Time{},
}
for repo, perms := range repoPerms {
includes := make([]glob.Glob, 0, len(perms.PathIncludes))
dirIncludes := make([]glob.Glob, 0)
paths := make([]path, 0, len(perms.Paths))
allDirs := make([]glob.Glob, 0)
dirSeen := make(map[string]struct{})
for _, rule := range perms.PathIncludes {
for _, rule := range perms.Paths {
exclusion := strings.HasPrefix(rule, "-")
rule = strings.TrimPrefix(rule, "-")
if !strings.HasPrefix(rule, "/") {
rule = "/" + rule
}
g, err := glob.Compile(rule, '/')
if err != nil {
return nil, errors.Wrap(err, "building include matcher")
}
includes = append(includes, g)
paths = append(paths, path{g, exclusion})
// We should include all directories above an include rule
dirs := expandDirs(rule)
@ -353,23 +378,17 @@ func (s *SubRepoPermsClient) getCompiledRules(ctx context.Context, userID int32)
if err != nil {
return nil, errors.Wrap(err, "building include matcher for dir")
}
dirIncludes = append(dirIncludes, g)
if exclusion {
continue
}
allDirs = append(allDirs, g)
dirSeen[dir] = struct{}{}
}
}
excludes := make([]glob.Glob, 0, len(perms.PathExcludes))
for _, rule := range perms.PathExcludes {
g, err := glob.Compile(rule, '/')
if err != nil {
return nil, errors.Wrap(err, "building exclude matcher")
}
excludes = append(excludes, g)
}
toCache.rules[repo] = compiledRules{
includes: includes,
excludes: excludes,
dirIncludes: dirIncludes,
paths: paths,
dirs: allDirs,
}
}
toCache.timestamp = s.clock()
@ -401,16 +420,20 @@ func (s *SubRepoPermsClient) EnabledForRepo(ctx context.Context, repo api.RepoNa
func expandDirs(rule string) []string {
dirs := make([]string, 0)
// Make sure the rule starts with a slash
if !strings.HasPrefix(rule, "/") {
rule = "/" + rule
}
// We can't support rules that start with a wildcard because we can only
// see one level of the tree at a time so we have no way of knowing which path leads
// to a file the user is allowed to see.
if strings.HasPrefix(rule, "*") {
if strings.HasPrefix(rule, "/*") {
return dirs
}
for {
lastSlash := strings.LastIndex(rule, "/")
if lastSlash == -1 {
if lastSlash <= 0 { // we have to ignore the slash at index 0
break
}
// Drop anything after the last slash
@ -425,13 +448,12 @@ func expandDirs(rule string) []string {
// NewSimpleChecker is exposed for testing and allows creation of a simple
// checker based on the rules provided. The rules are expected to be in glob
// format.
func NewSimpleChecker(repo api.RepoName, includes []string, excludes []string) (SubRepoPermissionChecker, error) {
func NewSimpleChecker(repo api.RepoName, paths []string) (SubRepoPermissionChecker, error) {
getter := NewMockSubRepoPermissionsGetter()
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
repo: {
PathIncludes: includes,
PathExcludes: excludes,
Paths: paths,
},
}, nil
})

View File

@ -61,8 +61,7 @@ func TestSubRepoPermsPermissions(t *testing.T) {
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
"sample": {
PathIncludes: []string{},
PathExcludes: []string{},
Paths: []string{},
},
}, nil
})
@ -82,8 +81,7 @@ func TestSubRepoPermsPermissions(t *testing.T) {
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
"sample": {
PathIncludes: []string{},
PathExcludes: []string{"/dev/*"},
Paths: []string{"-/dev/*"},
},
}, nil
})
@ -103,7 +101,7 @@ func TestSubRepoPermsPermissions(t *testing.T) {
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
"sample": {
PathIncludes: []string{"*"},
Paths: []string{"/*"},
},
}, nil
})
@ -112,7 +110,7 @@ func TestSubRepoPermsPermissions(t *testing.T) {
want: None,
},
{
name: "Exclude takes precedence",
name: "Last rule takes precedence (exclude)",
userID: 1,
content: RepoContent{
Repo: "sample",
@ -123,8 +121,7 @@ func TestSubRepoPermsPermissions(t *testing.T) {
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
"sample": {
PathIncludes: []string{"*"},
PathExcludes: []string{"/dev/*"},
Paths: []string{"/**", "-/dev/*"},
},
}, nil
})
@ -132,6 +129,26 @@ func TestSubRepoPermsPermissions(t *testing.T) {
},
want: None,
},
{
name: "Last rule takes precedence (include)",
userID: 1,
content: RepoContent{
Repo: "sample",
Path: "/dev/thing",
},
clientFn: func() (*SubRepoPermsClient, error) {
getter := NewMockSubRepoPermissionsGetter()
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
"sample": {
Paths: []string{"-/dev/*", "/**"},
},
}, nil
})
return NewSubRepoPermsClient(getter)
},
want: Read,
},
}
for _, tc := range testCases {
@ -233,19 +250,17 @@ func BenchmarkFilterActorPaths(b *testing.B) {
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
repo: {
PathIncludes: []string{
"base/**",
"*/stuff/**",
"frontend/**/stuff/*",
"config.yaml",
"subdir/**",
"**/README.md",
"dir.yaml",
},
PathExcludes: []string{
"subdir/remove/",
"subdir/*/also-remove/**",
"**/.secrets.env",
Paths: []string{
"/base/**",
"/*/stuff/**",
"/frontend/**/stuff/*",
"/config.yaml",
"/subdir/**",
"/**/README.md",
"/dir.yaml",
"-/subdir/remove/",
"-/subdir/*/also-remove/**",
"-/**/.secrets.env",
},
},
}, nil
@ -350,38 +365,54 @@ func TestSubRepoPermissionsCanReadDirectoriesInPath(t *testing.T) {
repoName := api.RepoName("repo")
testCases := []struct {
pathIncludes []string
paths []string
canReadAll []string
cannotReadAny []string
}{
{
pathIncludes: []string{"foo/bar/thing.txt"},
paths: []string{"foo/bar/thing.txt"},
canReadAll: []string{"foo/", "foo/bar/"},
cannotReadAny: []string{"foo/thing.txt", "foo/bar/other.txt"},
},
{
pathIncludes: []string{"foo/bar/**"},
canReadAll: []string{"foo/", "foo/bar/", "foo/bar/baz/", "foo/bar/baz/fox/"},
paths: []string{"foo/bar/**"},
canReadAll: []string{"foo/", "foo/bar/", "foo/bar/baz/", "foo/bar/baz/fox/"},
},
{
pathIncludes: []string{"foo/bar/"},
paths: []string{"foo/bar/"},
canReadAll: []string{"foo/", "foo/bar/"},
cannotReadAny: []string{"foo/thing.txt", "foo/bar/thing.txt"},
},
{
pathIncludes: []string{"baz/*/foo/bar/thing.txt"},
paths: []string{"baz/*/foo/bar/thing.txt"},
canReadAll: []string{"baz/", "baz/x/", "baz/x/foo/bar/"},
cannotReadAny: []string{"baz/thing.txt"},
},
// We can't support rules that start with a wildcard, see comment in expandDirs
{
pathIncludes: []string{"**/foo/bar/thing.txt"},
paths: []string{"**/foo/bar/thing.txt"},
cannotReadAny: []string{"foo/", "foo/bar/"},
},
{
pathIncludes: []string{"*/foo/bar/thing.txt"},
paths: []string{"*/foo/bar/thing.txt"},
cannotReadAny: []string{"foo/", "foo/bar/"},
},
{
paths: []string{"/**/foo/bar/thing.txt"},
cannotReadAny: []string{"foo/", "foo/bar/"},
},
{
paths: []string{"/*/foo/bar/thing.txt"},
cannotReadAny: []string{"foo/", "foo/bar/"},
},
{
paths: []string{"-/**", "/storage/redis/**"},
canReadAll: []string{"storage/", "/storage/", "/storage/redis/"},
},
{
paths: []string{"-/**", "-/storage/**", "/storage/redis/**"},
canReadAll: []string{"storage/", "/storage/", "/storage/redis/"},
},
}
for _, tc := range testCases {
@ -390,7 +421,7 @@ func TestSubRepoPermissionsCanReadDirectoriesInPath(t *testing.T) {
getter.GetByUserFunc.SetDefaultHook(func(ctx context.Context, i int32) (map[api.RepoName]SubRepoPermissions, error) {
return map[api.RepoName]SubRepoPermissions{
repoName: {
PathIncludes: tc.pathIncludes,
Paths: tc.paths,
},
}, nil
})

View File

@ -17319,6 +17319,19 @@
"GenerationExpression": "",
"Comment": ""
},
{
"Name": "paths",
"Index": 7,
"TypeName": "text[]",
"IsNullable": true,
"Default": "",
"CharacterMaximumLength": 0,
"IsIdentity": false,
"IdentityGeneration": "",
"IsGenerated": "NEVER",
"GenerationExpression": "",
"Comment": "Paths that begin with a minus sign (-) are exclusion paths."
},
{
"Name": "repo_id",
"Index": 1,

View File

@ -2697,6 +2697,7 @@ Foreign-key constraints:
path_includes | text[] | | |
path_excludes | text[] | | |
updated_at | timestamp with time zone | | not null | now()
paths | text[] | | |
Indexes:
"sub_repo_permissions_repo_id_user_id_version_uindex" UNIQUE, btree (repo_id, user_id, version)
"sub_repo_perms_user_id" btree (user_id)
@ -2708,6 +2709,8 @@ Foreign-key constraints:
Responsible for storing permissions at a finer granularity than repo
**paths**: Paths that begin with a minus sign (-) are exclusion paths.
# Table "public.survey_responses"
```
Column | Type | Collation | Nullable | Default

View File

@ -71,18 +71,17 @@ func (s *subRepoPermsStore) Done(err error) error {
// Upsert will upsert sub repo permissions data.
func (s *subRepoPermsStore) Upsert(ctx context.Context, userID int32, repoID api.RepoID, perms authz.SubRepoPermissions) error {
q := sqlf.Sprintf(`
INSERT INTO sub_repo_permissions (user_id, repo_id, path_includes, path_excludes, version, updated_at)
VALUES (%s, %s, %s, %s, %s, now())
INSERT INTO sub_repo_permissions (user_id, repo_id, paths, version, updated_at)
VALUES (%s, %s, %s, %s, now())
ON CONFLICT (user_id, repo_id, version)
DO UPDATE
SET
user_id = EXCLUDED.user_ID,
repo_id = EXCLUDED.repo_id,
path_includes = EXCLUDED.path_includes,
path_excludes = EXCLUDED.path_excludes,
paths = EXCLUDED.paths,
version = EXCLUDED.version,
updated_at = now()
`, userID, repoID, pq.Array(perms.PathIncludes), pq.Array(perms.PathExcludes), SubRepoPermsVersion)
`, userID, repoID, pq.Array(perms.Paths), SubRepoPermsVersion)
return errors.Wrap(s.Exec(ctx, q), "upserting sub repo permissions")
}
@ -91,8 +90,8 @@ SET
// nothing is written.
func (s *subRepoPermsStore) UpsertWithSpec(ctx context.Context, userID int32, spec api.ExternalRepoSpec, perms authz.SubRepoPermissions) error {
q := sqlf.Sprintf(`
INSERT INTO sub_repo_permissions (user_id, repo_id, path_includes, path_excludes, version, updated_at)
SELECT %s, id, %s, %s, %s, now()
INSERT INTO sub_repo_permissions (user_id, repo_id, paths, version, updated_at)
SELECT %s, id, %s, %s, now()
FROM repo
WHERE external_service_id = %s
AND external_service_type = %s
@ -102,11 +101,10 @@ DO UPDATE
SET
user_id = EXCLUDED.user_ID,
repo_id = EXCLUDED.repo_id,
path_includes = EXCLUDED.path_includes,
path_excludes = EXCLUDED.path_excludes,
paths = EXCLUDED.paths,
version = EXCLUDED.version,
updated_at = now()
`, userID, pq.Array(perms.PathIncludes), pq.Array(perms.PathExcludes), SubRepoPermsVersion, spec.ServiceID, spec.ServiceType, spec.ID)
`, userID, pq.Array(perms.Paths), SubRepoPermsVersion, spec.ServiceID, spec.ServiceType, spec.ID)
return errors.Wrap(s.Exec(ctx, q), "upserting sub repo permissions with spec")
}
@ -114,7 +112,7 @@ SET
// Get will fetch sub repo rules for the given repo and user combination.
func (s *subRepoPermsStore) Get(ctx context.Context, userID int32, repoID api.RepoID) (*authz.SubRepoPermissions, error) {
q := sqlf.Sprintf(`
SELECT path_includes, path_excludes
SELECT paths
FROM sub_repo_permissions
WHERE repo_id = %s
AND user_id = %s
@ -128,13 +126,11 @@ WHERE repo_id = %s
perms := new(authz.SubRepoPermissions)
for rows.Next() {
var includes []string
var excludes []string
if err := rows.Scan(pq.Array(&includes), pq.Array(&excludes)); err != nil {
var paths []string
if err := rows.Scan(pq.Array(&paths)); err != nil {
return nil, errors.Wrap(err, "scanning row")
}
perms.PathIncludes = append(perms.PathIncludes, includes...)
perms.PathExcludes = append(perms.PathExcludes, excludes...)
perms.Paths = append(perms.Paths, paths...)
}
if err := rows.Close(); err != nil {
@ -147,7 +143,7 @@ WHERE repo_id = %s
// GetByUser fetches all sub repo perms for a user keyed by repo.
func (s *subRepoPermsStore) GetByUser(ctx context.Context, userID int32) (map[api.RepoName]authz.SubRepoPermissions, error) {
q := sqlf.Sprintf(`
SELECT r.name, path_includes, path_excludes
SELECT r.name, paths
FROM sub_repo_permissions
JOIN repo r on r.id = repo_id
WHERE user_id = %s
@ -163,7 +159,7 @@ WHERE user_id = %s
for rows.Next() {
var perms authz.SubRepoPermissions
var repoName api.RepoName
if err := rows.Scan(&repoName, pq.Array(&perms.PathIncludes), pq.Array(&perms.PathExcludes)); err != nil {
if err := rows.Scan(&repoName, pq.Array(&perms.Paths)); err != nil {
return nil, errors.Wrap(err, "scanning row")
}
result[repoName] = perms
@ -178,7 +174,7 @@ WHERE user_id = %s
func (s *subRepoPermsStore) GetByUserAndService(ctx context.Context, userID int32, serviceType string, serviceID string) (map[api.ExternalRepoSpec]authz.SubRepoPermissions, error) {
q := sqlf.Sprintf(`
SELECT r.external_id, path_includes, path_excludes
SELECT r.external_id, paths
FROM sub_repo_permissions
JOIN repo r on r.id = repo_id
WHERE user_id = %s
@ -199,7 +195,7 @@ WHERE user_id = %s
ServiceType: serviceType,
ServiceID: serviceID,
}
if err := rows.Scan(&spec.ID, pq.Array(&perms.PathIncludes), pq.Array(&perms.PathExcludes)); err != nil {
if err := rows.Scan(&spec.ID, pq.Array(&perms.Paths)); err != nil {
return nil, errors.Wrap(err, "scanning row")
}
result[spec] = perms

View File

@ -30,8 +30,7 @@ func TestSubRepoPermsInsert(t *testing.T) {
userID := int32(1)
repoID := api.RepoID(1)
perms := authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo/*"},
PathExcludes: []string{"/src/bar/*"},
Paths: []string{"/src/foo/*", "-/src/bar/*"},
}
if err := s.Upsert(ctx, userID, repoID, perms); err != nil {
t.Fatal(err)
@ -63,8 +62,7 @@ func TestSubRepoPermsUpsert(t *testing.T) {
userID := int32(1)
repoID := api.RepoID(1)
perms := authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo/*"},
PathExcludes: []string{"/src/bar/*"},
Paths: []string{"/src/foo/*", "-/src/bar/*"},
}
// Insert initial data
if err := s.Upsert(ctx, userID, repoID, perms); err != nil {
@ -73,8 +71,7 @@ func TestSubRepoPermsUpsert(t *testing.T) {
// Upsert to change perms
perms = authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo_upsert/*"},
PathExcludes: []string{"/src/bar_upsert/*"},
Paths: []string{"/src/foo_upsert/*", "-/src/bar_upsert/*"},
}
if err := s.Upsert(ctx, userID, repoID, perms); err != nil {
t.Fatal(err)
@ -106,8 +103,7 @@ func TestSubRepoPermsUpsertWithSpec(t *testing.T) {
userID := int32(1)
repoID := api.RepoID(1)
perms := authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo/*"},
PathExcludes: []string{"/src/bar/*"},
Paths: []string{"/src/foo/*", "-/src/bar/*"},
}
spec := api.ExternalRepoSpec{
ID: "MDEwOlJlcG9zaXRvcnk0MTI4ODcwOA==",
@ -121,8 +117,7 @@ func TestSubRepoPermsUpsertWithSpec(t *testing.T) {
// Upsert to change perms
perms = authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo_upsert/*"},
PathExcludes: []string{"/src/bar_upsert/*"},
Paths: []string{"/src/foo_upsert/*", "-/src/bar_upsert/*"},
}
if err := s.UpsertWithSpec(ctx, userID, spec, perms); err != nil {
t.Fatal(err)
@ -153,8 +148,7 @@ func TestSubRepoPermsGetByUser(t *testing.T) {
userID := int32(1)
perms := authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo/*"},
PathExcludes: []string{"/src/bar/*"},
Paths: []string{"/src/foo/*", "-/src/bar/*"},
}
if err := s.Upsert(ctx, userID, api.RepoID(1), perms); err != nil {
t.Fatal(err)
@ -162,8 +156,7 @@ func TestSubRepoPermsGetByUser(t *testing.T) {
userID = int32(1)
perms = authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo2/*"},
PathExcludes: []string{"/src/bar2/*"},
Paths: []string{"/src/foo2/*", "-/src/bar2/*"},
}
if err := s.Upsert(ctx, userID, api.RepoID(2), perms); err != nil {
t.Fatal(err)
@ -176,12 +169,10 @@ func TestSubRepoPermsGetByUser(t *testing.T) {
want := map[api.RepoName]authz.SubRepoPermissions{
"github.com/foo/bar": {
PathIncludes: []string{"/src/foo/*"},
PathExcludes: []string{"/src/bar/*"},
Paths: []string{"/src/foo/*", "-/src/bar/*"},
},
"github.com/foo/baz": {
PathIncludes: []string{"/src/foo2/*"},
PathExcludes: []string{"/src/bar2/*"},
Paths: []string{"/src/foo2/*", "-/src/bar2/*"},
},
}
@ -206,8 +197,7 @@ func TestSubRepoPermsGetByUserAndService(t *testing.T) {
userID := int32(1)
perms := authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo/*"},
PathExcludes: []string{"/src/bar/*"},
Paths: []string{"/src/foo/*", "-/src/bar/*"},
}
if err := s.Upsert(ctx, userID, api.RepoID(1), perms); err != nil {
t.Fatal(err)
@ -215,8 +205,7 @@ func TestSubRepoPermsGetByUserAndService(t *testing.T) {
userID = int32(1)
perms = authz.SubRepoPermissions{
PathIncludes: []string{"/src/foo2/*"},
PathExcludes: []string{"/src/bar2/*"},
Paths: []string{"/src/foo2/*", "-/src/bar2/*"},
}
if err := s.Upsert(ctx, userID, api.RepoID(2), perms); err != nil {
t.Fatal(err)
@ -247,16 +236,14 @@ func TestSubRepoPermsGetByUserAndService(t *testing.T) {
ServiceType: "github",
ServiceID: "https://github.com/",
}: {
PathIncludes: []string{"/src/foo/*"},
PathExcludes: []string{"/src/bar/*"},
Paths: []string{"/src/foo/*", "-/src/bar/*"},
},
{
ID: "MDEwOlJlcG9zaXRvcnk0MTI4ODcwOB==",
ServiceType: "github",
ServiceID: "https://github.com/",
}: {
PathIncludes: []string{"/src/foo2/*"},
PathExcludes: []string{"/src/bar2/*"},
Paths: []string{"/src/foo2/*", "-/src/bar2/*"},
},
},
},

View File

@ -49,8 +49,7 @@ func TestReadDir_SubRepoFiltering(t *testing.T) {
srpGetter := database.NewMockSubRepoPermsStore()
testSubRepoPerms := map[api.RepoName]authz.SubRepoPermissions{
repo: {
PathIncludes: []string{"**"},
PathExcludes: []string{"app/**"},
Paths: []string{"/**", "-/app/**"},
},
}
srpGetter.GetByUserFunc.SetDefaultReturn(testSubRepoPerms, nil)

View File

@ -31,7 +31,7 @@ func TestFilterZoektResults(t *testing.T) {
ctx = actor.WithActor(ctx, &actor.Actor{
UID: 1,
})
checker, err := authz.NewSimpleChecker(repoName, []string{"**"}, []string{"*_test.go"})
checker, err := authz.NewSimpleChecker(repoName, []string{"/**", "-/*_test.go"})
if err != nil {
t.Fatal(err)
}

View File

@ -0,0 +1,2 @@
ALTER TABLE sub_repo_permissions
DROP COLUMN IF EXISTS paths;

View File

@ -0,0 +1,2 @@
name: perforce_merge_includes_excludes_columns
parents: [1660711451, 1662467128]

View File

@ -0,0 +1,7 @@
ALTER TABLE ONLY sub_repo_permissions
ADD COLUMN IF NOT EXISTS paths text[];
COMMENT ON COLUMN sub_repo_permissions.paths IS 'Paths that begin with a minus sign (-) are exclusion paths.';
UPDATE sub_repo_permissions
SET paths = (ARRAY(SELECT CONCAT('/', path_include) FROM unnest(path_includes) as path_include) || ARRAY(SELECT CONCAT('-/', path_exclude) FROM unnest(path_excludes) as path_exclude));
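
The UPDATE above backfills the new column from the two legacy ones. A small Go sketch of the same transformation (not part of this migration) may make the mapping clearer:

```go
// mergeLegacyColumns mirrors the SQL backfill: includes gain a "/" prefix,
// excludes gain a "-/" prefix, and the two lists are concatenated in order.
func mergeLegacyColumns(pathIncludes, pathExcludes []string) []string {
	paths := make([]string, 0, len(pathIncludes)+len(pathExcludes))
	for _, p := range pathIncludes {
		paths = append(paths, "/"+p)
	}
	for _, p := range pathExcludes {
		paths = append(paths, "-/"+p)
	}
	return paths
}

// mergeLegacyColumns([]string{"src/**"}, []string{"secrets/**"})
//   => []string{"/src/**", "-/secrets/**"}
```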

View File

@ -3285,11 +3285,14 @@ CREATE TABLE sub_repo_permissions (
version integer DEFAULT 1 NOT NULL,
path_includes text[],
path_excludes text[],
updated_at timestamp with time zone DEFAULT now() NOT NULL
updated_at timestamp with time zone DEFAULT now() NOT NULL,
paths text[]
);
COMMENT ON TABLE sub_repo_permissions IS 'Responsible for storing permissions at a finer granularity than repo';
COMMENT ON COLUMN sub_repo_permissions.paths IS 'Paths that begin with a minus sign (-) are exclusion paths.';
CREATE TABLE survey_responses (
id bigint NOT NULL,
user_id integer,