Mirror of https://github.com/sourcegraph/sourcegraph.git (synced 2026-02-06 18:51:59 +00:00)
Moving dump resolution to inside transport layer of Codenav service (#40069)
parent aed88868f0
commit 3873ac28b6
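The change in outline: the GraphQL transport layer no longer resolves dumps through the legacy resolvers package; it asks the CodeNav service for a GitBlobLSIFData resolver and delegates to it. A minimal sketch of the new call path, assembled from the hunks below — only the identifiers that appear in the diff (CodeNavResolver, GitBlobLSIFDataResolverFactory, NewQueryResolver, the args fields) are real; the surrounding scaffolding and the nil error collector are illustrative:

func (r *Resolver) GitBlobLSIFData(ctx context.Context, args *gql.GitBlobLSIFDataArgs) (gql.GitBlobLSIFDataResolver, error) {
	// Dump resolution now happens inside the codenav transport layer: ask the
	// CodeNav resolver to build a GitBlobLSIFDataResolver for this blob.
	codenav := r.resolver.CodeNavResolver()
	gitBlobResolver, err := codenav.GitBlobLSIFDataResolverFactory(ctx, args.Repo, string(args.Commit), args.Path, args.ToolName, args.ExactPath)
	if err != nil || gitBlobResolver == nil {
		return nil, err
	}

	// The frontend QueryResolver wraps it and forwards Ranges/Definitions/
	// References/Hover/Diagnostics to the new resolver.
	return NewQueryResolver(r.gitserver, gitBlobResolver, r.resolver, r.locationResolver, nil), nil
}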
@ -9,7 +9,6 @@ import (
logger "github.com/sourcegraph/log"

"github.com/sourcegraph/sourcegraph/cmd/frontend/enterprise"
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
codeintelresolvers "github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
codeintelgqlresolvers "github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers/graphql"
codenavgraphql "github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/transport/graphql"
@ -17,27 +16,12 @@ import (
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/honey"
"github.com/sourcegraph/sourcegraph/internal/observation"
executorgraphql "github.com/sourcegraph/sourcegraph/internal/services/executors/transport/graphql"
"github.com/sourcegraph/sourcegraph/internal/symbols"
"github.com/sourcegraph/sourcegraph/internal/trace"
)

func Init(ctx context.Context, db database.DB, config *Config, enterpriseServices *enterprise.Services, observationContext *observation.Context, services *Services) error {
resolverObservationContext := &observation.Context{
Logger: observationContext.Logger,
Tracer: observationContext.Tracer,
Registerer: observationContext.Registerer,
HoneyDataset: &honey.Dataset{
Name: "codeintel-graphql",
SampleRate: 4,
},
}

enterpriseServices.CodeIntelResolver = newResolver(db, config, resolverObservationContext, services)
enterpriseServices.NewCodeIntelUploadHandler = newUploadHandler(services)
return nil
}

func newResolver(db database.DB, config *Config, observationContext *observation.Context, services *Services) gql.CodeIntelResolver {
func Init(ctx context.Context, db database.DB, config *Config, enterpriseServices *enterprise.Services, services *Services) error {
policyMatcher := policies.NewMatcher(services.gitserverClient, policies.NoopExtractor, false, false)

codenavCtx := &observation.Context{
@ -45,7 +29,8 @@ func newResolver(db database.DB, config *Config, observationContext *observation
Tracer: &trace.Tracer{Tracer: opentracing.GlobalTracer()},
Registerer: prometheus.DefaultRegisterer,
}
codenavResolver := codenavgraphql.New(services.CodeNavSvc, config.HunkCacheSize, codenavCtx)
codenavResolver := codenavgraphql.New(services.CodeNavSvc, services.gitserverClient, config.MaximumIndexesPerMonikerSearch, config.HunkCacheSize, codenavCtx)
executorResolver := executorgraphql.New(db)

innerResolver := codeintelresolvers.NewResolver(
services.dbStore,
@ -54,20 +39,16 @@ func newResolver(db database.DB, config *Config, observationContext *observation
policyMatcher,
services.indexEnqueuer,
symbols.DefaultClient,
config.MaximumIndexesPerMonikerSearch,
observationContext,
db,
codenavResolver,
executorResolver,
)

obsCtx := &observation.Context{
Logger: nil,
Tracer: &trace.Tracer{},
Registerer: nil,
HoneyDataset: &honey.Dataset{},
}
observationCtx := &observation.Context{Logger: nil, Tracer: &trace.Tracer{}, Registerer: nil, HoneyDataset: &honey.Dataset{}}

return codeintelgqlresolvers.NewResolver(db, services.gitserverClient, innerResolver, obsCtx)
enterpriseServices.CodeIntelResolver = codeintelgqlresolvers.NewResolver(db, services.gitserverClient, innerResolver, observationCtx)
enterpriseServices.NewCodeIntelUploadHandler = newUploadHandler(services)

return nil
}

func newUploadHandler(services *Services) func(internal bool) http.Handler {

@ -1,100 +0,0 @@
package resolvers

import (
"context"
"fmt"
"strings"
"sync"

"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
"github.com/sourcegraph/sourcegraph/lib/errors"
)

type cachedCommitChecker struct {
gitserverClient GitserverClient
mutex sync.RWMutex
cache map[int]map[string]bool
}

func newCachedCommitChecker(gitserverClient GitserverClient) *cachedCommitChecker {
return &cachedCommitChecker{
gitserverClient: gitserverClient,
cache: map[int]map[string]bool{},
}
}

// set marks the given repository and commit as valid and resolvable by gitserver.
func (c *cachedCommitChecker) set(repositoryID int, commit string) {
c.setInternal(repositoryID, commit, true)
}

// existsBatch determines if the given commits are resolvable for the given repositories.
// If we do not know the answer from a previous call to set or existsBatch, we ask gitserver
// to resolve the remaining commits and store the results for subsequent calls. This method
// returns a slice of the same size as the input slice, true indicating that the commit at
// the symmetric index exists.
func (c *cachedCommitChecker) existsBatch(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error) {
exists := make([]bool, len(commits))
rcIndexMap := make([]int, 0, len(commits))
rcs := make([]gitserver.RepositoryCommit, 0, len(commits))

for i, rc := range commits {
if e, ok := c.getInternal(rc.RepositoryID, rc.Commit); ok {
exists[i] = e
} else {
rcIndexMap = append(rcIndexMap, i)
rcs = append(rcs, gitserver.RepositoryCommit{
RepositoryID: rc.RepositoryID,
Commit: rc.Commit,
})
}
}

if len(rcs) == 0 {
return exists, nil
}

// Perform heavy work outside of critical section
e, err := c.gitserverClient.CommitsExist(ctx, rcs)
if err != nil {
return nil, errors.Wrap(err, "gitserverClient.CommitsExist")
}
if len(e) != len(rcs) {
panic(strings.Join([]string{
fmt.Sprintf("Expected slice returned from CommitsExist to have len %d, but has len %d.", len(rcs), len(e)),
"If this panic occurred during a test, your test is missing a mock definition for CommitsExist.",
"If this occurred during runtime, please file a bug.",
}, " "))
}

for i, rc := range rcs {
exists[rcIndexMap[i]] = e[i]
c.setInternal(rc.RepositoryID, rc.Commit, e[i])
}

return exists, nil
}

func (c *cachedCommitChecker) getInternal(repositoryID int, commit string) (bool, bool) {
c.mutex.RLock()
defer c.mutex.RUnlock()

if repositoryMap, ok := c.cache[repositoryID]; ok {
if exists, ok := repositoryMap[commit]; ok {
return exists, true
}
}

return false, false
}

func (c *cachedCommitChecker) setInternal(repositoryID int, commit string, exists bool) {
c.mutex.Lock()
defer c.mutex.Unlock()

if _, ok := c.cache[repositoryID]; !ok {
c.cache[repositoryID] = map[string]bool{}
}

c.cache[repositoryID][commit] = exists
}

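For context, a sketch of how the deleted cachedCommitChecker was typically used by its callers (see findClosestDumps below): batch the candidate commits, resolve them against gitserver once, and drop candidates whose commits no longer exist. The gitserverClient, ctx, and dumps values here are assumptions for illustration, not part of the diff.

	checker := newCachedCommitChecker(gitserverClient)

	// Collect one RepositoryCommit per candidate upload.
	commits := make([]gitserver.RepositoryCommit, 0, len(dumps))
	for _, dump := range dumps {
		commits = append(commits, gitserver.RepositoryCommit{RepositoryID: dump.RepositoryID, Commit: dump.Commit})
	}

	// A single batched gitserver call; results are memoized for later existsBatch/set calls.
	exists, err := checker.existsBatch(ctx, commits)
	if err != nil {
		return nil, err
	}

	// Keep only the uploads whose commits are still resolvable.
	filtered := dumps[:0]
	for i, dump := range dumps {
		if exists[i] {
			filtered = append(filtered, dump)
		}
	}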
@ -1,151 +0,0 @@
|
||||
package resolvers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
|
||||
)
|
||||
|
||||
func TestCachedCommitChecker(t *testing.T) {
|
||||
t.Run("uncached", func(t *testing.T) {
|
||||
commits := []gitserver.RepositoryCommit{
|
||||
{RepositoryID: 150, Commit: "deadbeef1"},
|
||||
{RepositoryID: 151, Commit: "deadbeef2"},
|
||||
{RepositoryID: 152, Commit: "deadbeef3"},
|
||||
{RepositoryID: 153, Commit: "deadbeef4"},
|
||||
}
|
||||
expectedExists := []bool{
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
}
|
||||
|
||||
gitserverClient := NewMockGitserverClient()
|
||||
gitserverClient.CommitsExistFunc.SetDefaultReturn(expectedExists, nil)
|
||||
|
||||
ctx := context.Background()
|
||||
commitChecker := newCachedCommitChecker(gitserverClient)
|
||||
|
||||
exists, err := commitChecker.existsBatch(ctx, commits)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error checking commit batch: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(expectedExists, exists); diff != "" {
|
||||
t.Errorf("unexpected exists slice (-want +got):\n%s", diff)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("fully cached", func(t *testing.T) {
|
||||
commits := []gitserver.RepositoryCommit{
|
||||
{RepositoryID: 150, Commit: "deadbeef1"},
|
||||
{RepositoryID: 151, Commit: "deadbeef2"},
|
||||
{RepositoryID: 152, Commit: "deadbeef3"},
|
||||
{RepositoryID: 153, Commit: "deadbeef4"},
|
||||
}
|
||||
expectedExists := []bool{
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
}
|
||||
|
||||
gitserverClient := NewMockGitserverClient()
|
||||
gitserverClient.CommitsExistFunc.SetDefaultReturn(expectedExists, nil)
|
||||
|
||||
ctx := context.Background()
|
||||
commitChecker := newCachedCommitChecker(gitserverClient)
|
||||
|
||||
exists1, err := commitChecker.existsBatch(ctx, commits)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error checking commit batch: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(expectedExists, exists1); diff != "" {
|
||||
t.Errorf("unexpected exists slice (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
// Should be fully cached
|
||||
exists2, err := commitChecker.existsBatch(ctx, commits)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error checking commit batch: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(expectedExists, exists2); diff != "" {
|
||||
t.Errorf("unexpected exists slice (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
// Should not have called underlying gitserver method twice
|
||||
if callCount := len(gitserverClient.CommitsExistFunc.History()); callCount != 1 {
|
||||
t.Errorf("unexpected call count. want=%d have=%d", 1, callCount)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("partially cached", func(t *testing.T) {
|
||||
gitserverClient := NewMockGitserverClient()
|
||||
gitserverClient.CommitsExistFunc.SetDefaultHook(func(ctx context.Context, rcs []gitserver.RepositoryCommit) (exists []bool, _ error) {
|
||||
for _, rc := range rcs {
|
||||
exists = append(exists, len(rc.Commit)%2 == 0)
|
||||
}
|
||||
return
|
||||
})
|
||||
|
||||
ctx := context.Background()
|
||||
commitChecker := newCachedCommitChecker(gitserverClient)
|
||||
|
||||
commits1 := []gitserver.RepositoryCommit{
|
||||
{RepositoryID: 151, Commit: "even"},
|
||||
{RepositoryID: 151, Commit: "odd"},
|
||||
{RepositoryID: 152, Commit: "even"},
|
||||
{RepositoryID: 152, Commit: "odd"},
|
||||
}
|
||||
exists1, err := commitChecker.existsBatch(ctx, commits1)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error checking commit batch: %s", err)
|
||||
}
|
||||
expected1 := []bool{
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
}
|
||||
if diff := cmp.Diff(expected1, exists1); diff != "" {
|
||||
t.Errorf("unexpected exists slice (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
commits2 := []gitserver.RepositoryCommit{
|
||||
{RepositoryID: 152, Commit: "odd"}, // cached
|
||||
{RepositoryID: 153, Commit: "odd"},
|
||||
{RepositoryID: 153, Commit: "even"},
|
||||
{RepositoryID: 152, Commit: "even"}, // cached
|
||||
}
|
||||
exists2, err := commitChecker.existsBatch(ctx, commits2)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error checking commit batch: %s", err)
|
||||
}
|
||||
expected2 := []bool{
|
||||
false, // cached
|
||||
false,
|
||||
true,
|
||||
true, // cached
|
||||
}
|
||||
if diff := cmp.Diff(expected2, exists2); diff != "" {
|
||||
t.Errorf("unexpected exists slice (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
// Should not have called underlying gitserver method twice
|
||||
if callCount := len(gitserverClient.CommitsExistFunc.History()); callCount != 2 {
|
||||
t.Errorf("unexpected call count. want=%d have=%d", 2, callCount)
|
||||
} else {
|
||||
calls := gitserverClient.CommitsExistFunc.History()
|
||||
|
||||
if diff := cmp.Diff(commits1, calls[0].Arg1); diff != "" {
|
||||
t.Errorf("unexpected commits argument (-want +got):\n%s", diff)
|
||||
}
|
||||
if diff := cmp.Diff(commits2[1:3], calls[1].Arg1); diff != "" {
|
||||
t.Errorf("unexpected commits argument (-want +got):\n%s", diff)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -1,146 +0,0 @@
|
||||
package resolvers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"github.com/opentracing/opentracing-go/log"
|
||||
|
||||
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
|
||||
"github.com/sourcegraph/sourcegraph/internal/gitserver"
|
||||
"github.com/sourcegraph/sourcegraph/internal/observation"
|
||||
"github.com/sourcegraph/sourcegraph/lib/errors"
|
||||
)
|
||||
|
||||
// numAncestors is the number of ancestors to query from gitserver when trying to find the closest
// ancestor we have data for. Setting this value too low (relative to a repository's commit rate)
// will cause requests for an unknown commit to return too few results; setting this value too high
// will raise the latency of requests for an unknown commit.
//
// TODO(efritz) - make adjustable via site configuration
const numAncestors = 100
|
||||
|
||||
// findClosestDumps returns the set of dumps that can most accurately answer code intelligence
|
||||
// queries for the given path. If exactPath is true, then only dumps that definitely contain the
|
||||
// exact document path are returned. Otherwise, dumps containing any document for which the given
|
||||
// path is a prefix are returned. These dump IDs should be subsequently passed to invocations of
|
||||
// Definitions, References, and Hover.
|
||||
func (r *resolver) findClosestDumps(ctx context.Context, cachedCommitChecker *cachedCommitChecker, repositoryID int, commit, path string, exactPath bool, indexer string) (_ []store.Dump, err error) {
|
||||
ctx, trace, endObservation := r.operations.findClosestDumps.With(ctx, &err, observation.Args{
|
||||
LogFields: []log.Field{
|
||||
log.Int("repositoryID", repositoryID),
|
||||
log.String("commit", commit),
|
||||
log.String("path", path),
|
||||
log.Bool("exactPath", exactPath),
|
||||
log.String("indexer", indexer),
|
||||
},
|
||||
})
|
||||
defer endObservation(1, observation.Args{})
|
||||
|
||||
candidates, err := r.inferClosestUploads(ctx, repositoryID, commit, path, exactPath, indexer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
trace.Log(
|
||||
log.Int("numCandidates", len(candidates)),
|
||||
log.String("candidates", uploadIDsToString(candidates)),
|
||||
)
|
||||
|
||||
candidatesWithCommits, err := filterUploadsWithCommits(ctx, cachedCommitChecker, candidates)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
trace.Log(
|
||||
log.Int("numCandidatesWithCommits", len(candidatesWithCommits)),
|
||||
log.String("candidatesWithCommits", uploadIDsToString(candidatesWithCommits)),
|
||||
)
|
||||
|
||||
// Filter in-place
|
||||
filtered := candidatesWithCommits[:0]
|
||||
|
||||
for i := range candidatesWithCommits {
|
||||
if exactPath {
|
||||
// TODO - this breaks if the file was renamed in git diff
|
||||
pathExists, err := r.lsifStore.Exists(ctx, candidates[i].ID, strings.TrimPrefix(path, candidates[i].Root))
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "lsifStore.Exists")
|
||||
}
|
||||
if !pathExists {
|
||||
continue
|
||||
}
|
||||
} else { //nolint:staticcheck
|
||||
// TODO(efritz) - ensure there's a valid document path for this condition as well
|
||||
}
|
||||
|
||||
filtered = append(filtered, candidates[i])
|
||||
}
|
||||
trace.Log(
|
||||
log.Int("numFiltered", len(filtered)),
|
||||
log.String("filtered", uploadIDsToString(filtered)),
|
||||
)
|
||||
|
||||
return filtered, nil
|
||||
}
|
||||
|
||||
// inferClosestUploads will return the set of visible uploads for the given commit. If this commit is
// newer than our last refresh of the lsif_nearest_uploads table for this repository, then we will mark
// the repository as dirty and quickly approximate the correct set of visible uploads.
//
// Because updating the entire commit graph is a blocking, expensive, and lock-guarded process, we want
// to only do that in the background and do something cheap in latency-sensitive paths. To construct an
// approximate result, we query gitserver for a (relatively small) set of ancestors for the given commit,
// correlate that with the upload data we have for those commits, and re-run the visibility algorithm over
// the graph. This will not always produce the full set of visible commits - some responses may not contain
// all results, while a subsequent request made after the lsif_nearest_uploads table has been updated to
// include this commit will.
//
// TODO(efritz) - show an indication in the GraphQL response and the UI that this repo is refreshing.
|
||||
func (r *resolver) inferClosestUploads(ctx context.Context, repositoryID int, commit, path string, exactPath bool, indexer string) ([]store.Dump, error) {
|
||||
// The parameters exactPath and rootMustEnclosePath align here: if we're looking for dumps
|
||||
// that can answer queries for a directory (e.g. diagnostics), we want any dump that happens
|
||||
// to intersect the target directory. If we're looking for dumps that can answer queries for
|
||||
// a single file, then we need a dump with a root that properly encloses that file.
|
||||
if dumps, err := r.dbStore.FindClosestDumps(ctx, repositoryID, commit, path, exactPath, indexer); err != nil {
|
||||
return nil, errors.Wrap(err, "dbstore.FindClosestDumps")
|
||||
} else if len(dumps) != 0 {
|
||||
return dumps, nil
|
||||
}
|
||||
|
||||
// Repository has no LSIF data at all
|
||||
if repositoryExists, err := r.dbStore.HasRepository(ctx, repositoryID); err != nil {
|
||||
return nil, errors.Wrap(err, "dbstore.HasRepository")
|
||||
} else if !repositoryExists {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Commit is known and the empty dumps list explicitly means nothing is visible
|
||||
if commitExists, err := r.dbStore.HasCommit(ctx, repositoryID, commit); err != nil {
|
||||
return nil, errors.Wrap(err, "dbstore.HasCommit")
|
||||
} else if commitExists {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Otherwise, the repository has LSIF data but we don't know about the commit. This commit
|
||||
// is probably newer than our last upload. Pull back a portion of the updated commit graph
|
||||
// and try to link it with what we have in the database. Then mark the repository's commit
|
||||
// graph as dirty so it's updated for subsequent requests.
|
||||
|
||||
graph, err := r.gitserverClient.CommitGraph(ctx, repositoryID, gitserver.CommitGraphOptions{
|
||||
Commit: commit,
|
||||
Limit: numAncestors,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "gitserverClient.CommitGraph")
|
||||
}
|
||||
|
||||
dumps, err := r.dbStore.FindClosestDumpsFromGraphFragment(ctx, repositoryID, commit, path, exactPath, indexer, graph)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "dbstore.FindClosestDumpsFromGraphFragment")
|
||||
}
|
||||
|
||||
if err := r.dbStore.MarkRepositoryAsDirty(ctx, repositoryID); err != nil {
|
||||
return nil, errors.Wrap(err, "dbstore.MarkRepositoryAsDirty")
|
||||
}
|
||||
|
||||
return dumps, nil
|
||||
}
|
||||
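A hedged usage sketch of the deleted entry point: the resolver receiver, gitserver client, repository ID, commit, path, and indexer values below are illustrative (they mirror the test fixtures that follow), but the call shape matches the findClosestDumps signature above, and the returned dump IDs are what the subsequent Definitions/References/Hover queries consume.

	checker := newCachedCommitChecker(gitserverClient)

	dumps, err := r.findClosestDumps(ctx, checker, 42, "deadbeef", "s1/main.go", true, "idx")
	if err != nil {
		return err
	}
	for _, dump := range dumps {
		// Each dump.ID names an upload bundle that can answer queries for this path.
		_ = dump.ID
	}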
@ -1,167 +0,0 @@
|
||||
package resolvers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
|
||||
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
|
||||
"github.com/sourcegraph/sourcegraph/internal/database"
|
||||
"github.com/sourcegraph/sourcegraph/internal/gitserver/gitdomain"
|
||||
"github.com/sourcegraph/sourcegraph/internal/observation"
|
||||
)
|
||||
|
||||
func TestFindClosestDumps(t *testing.T) {
|
||||
mockDBStore := NewMockDBStore()
|
||||
mockLSIFStore := NewMockLSIFStore()
|
||||
mockGitserverClient := NewMockGitserverClient()
|
||||
commitChecker := newCachedCommitChecker(mockGitserverClient)
|
||||
|
||||
// known repository and commit
|
||||
mockDBStore.HasRepositoryFunc.SetDefaultReturn(true, nil)
|
||||
mockDBStore.HasCommitFunc.SetDefaultReturn(true, nil)
|
||||
|
||||
// use commit graph in database
|
||||
mockDBStore.FindClosestDumpsFunc.SetDefaultReturn([]store.Dump{
|
||||
{ID: 50, RepositoryID: 42, Commit: "c0", Root: "s1/"},
|
||||
{ID: 51, RepositoryID: 42, Commit: "c1", Root: "s1/"}, // not in LSIF
|
||||
{ID: 52, RepositoryID: 42, Commit: "c2", Root: "s1/"},
|
||||
{ID: 53, RepositoryID: 42, Commit: "c3", Root: "s2/"}, // no file in root
|
||||
{ID: 54, RepositoryID: 42, Commit: "c4", Root: "s1/"}, // not in gitserver
|
||||
}, nil)
|
||||
|
||||
mockLSIFStore.ExistsFunc.SetDefaultHook(func(ctx context.Context, bundleID int, path string) (bool, error) {
|
||||
return path == "main.go" && bundleID != 51, nil
|
||||
})
|
||||
mockGitserverClient.CommitsExistFunc.SetDefaultHook(func(ctx context.Context, rcs []gitserver.RepositoryCommit) (exists []bool, _ error) {
|
||||
for _, rc := range rcs {
|
||||
exists = append(exists, rc.Commit != "c4")
|
||||
}
|
||||
return
|
||||
})
|
||||
|
||||
resolver := newResolver(
|
||||
mockDBStore,
|
||||
mockLSIFStore,
|
||||
mockGitserverClient,
|
||||
nil, // policyMatcher
|
||||
nil, // indexEnqueuer
|
||||
nil, // symbolsClient
|
||||
50, // maximumIndexesPerMonikerSearch
|
||||
&observation.TestContext, // observationContext
|
||||
database.NewMockDB(), // dbConn
|
||||
nil, // symbolResolver
|
||||
)
|
||||
dumps, err := resolver.findClosestDumps(context.Background(), commitChecker, 42, "deadbeef", "s1/main.go", true, "idx")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error finding closest dumps: %s", err)
|
||||
}
|
||||
|
||||
expected := []store.Dump{
|
||||
{ID: 50, RepositoryID: 42, Commit: "c0", Root: "s1/"},
|
||||
{ID: 52, RepositoryID: 42, Commit: "c2", Root: "s1/"},
|
||||
}
|
||||
if diff := cmp.Diff(expected, dumps); diff != "" {
|
||||
t.Errorf("unexpected dumps (-want +got):\n%s", diff)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindClosestDumpsInfersClosestUploads(t *testing.T) {
|
||||
mockDBStore := NewMockDBStore()
|
||||
mockLSIFStore := NewMockLSIFStore()
|
||||
mockGitserverClient := NewMockGitserverClient()
|
||||
commitChecker := newCachedCommitChecker(mockGitserverClient)
|
||||
|
||||
graph := gitdomain.ParseCommitGraph([]string{
|
||||
"d",
|
||||
"c",
|
||||
"b d",
|
||||
"a b c",
|
||||
})
|
||||
|
||||
// has repository, commit unknown but does exist
|
||||
mockDBStore.HasRepositoryFunc.SetDefaultReturn(true, nil)
|
||||
mockGitserverClient.CommitsExistFunc.SetDefaultHook(func(ctx context.Context, rcs []gitserver.RepositoryCommit) (exists []bool, _ error) {
|
||||
for range rcs {
|
||||
exists = append(exists, true)
|
||||
}
|
||||
return
|
||||
})
|
||||
|
||||
mockGitserverClient.CommitGraphFunc.SetDefaultReturn(graph, nil)
|
||||
mockDBStore.FindClosestDumpsFromGraphFragmentFunc.SetDefaultReturn([]store.Dump{
|
||||
{ID: 50, Root: "s1/"},
|
||||
{ID: 51, Root: "s1/"},
|
||||
{ID: 52, Root: "s1/"},
|
||||
{ID: 53, Root: "s2/"},
|
||||
}, nil)
|
||||
mockLSIFStore.ExistsFunc.SetDefaultHook(func(ctx context.Context, bundleID int, path string) (bool, error) {
|
||||
if bundleID == 50 && path == "main.go" {
|
||||
return true, nil
|
||||
}
|
||||
if bundleID == 51 && path == "main.go" {
|
||||
return false, nil
|
||||
}
|
||||
if bundleID == 52 && path == "main.go" {
|
||||
return true, nil
|
||||
}
|
||||
if bundleID == 53 && path == "s1/main.go" {
|
||||
return false, nil
|
||||
}
|
||||
return false, nil
|
||||
})
|
||||
|
||||
resolver := newResolver(mockDBStore, mockLSIFStore, mockGitserverClient, nil, nil, nil, 50, &observation.TestContext, database.NewMockDB(), nil)
|
||||
dumps, err := resolver.findClosestDumps(context.Background(), commitChecker, 42, "deadbeef", "s1/main.go", true, "idx")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error finding closest dumps: %s", err)
|
||||
}
|
||||
|
||||
expected := []store.Dump{
|
||||
{ID: 50, Root: "s1/"},
|
||||
{ID: 52, Root: "s1/"},
|
||||
}
|
||||
if diff := cmp.Diff(expected, dumps); diff != "" {
|
||||
t.Errorf("unexpected dumps (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
if calls := mockDBStore.FindClosestDumpsFromGraphFragmentFunc.History(); len(calls) != 1 {
|
||||
t.Errorf("expected number of calls to store.FindClosestDumpsFromGraphFragmentFunc. want=%d have=%d", 1, len(calls))
|
||||
} else {
|
||||
expectedGraph := map[string][]string{
|
||||
"a": {"b", "c"},
|
||||
"b": {"d"},
|
||||
"c": {},
|
||||
"d": {},
|
||||
}
|
||||
if diff := cmp.Diff(expectedGraph, calls[0].Arg6.Graph()); diff != "" {
|
||||
t.Errorf("unexpected graph (-want +got):\n%s", diff)
|
||||
}
|
||||
}
|
||||
|
||||
if value := len(mockDBStore.MarkRepositoryAsDirtyFunc.History()); value != 1 {
|
||||
t.Errorf("expected number of calls to store.MarkRepositoryAsDirty. want=%d have=%d", 1, value)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindClosestDumpsDoesNotInferClosestUploadForUnknownRepository(t *testing.T) {
|
||||
mockDBStore := NewMockDBStore()
|
||||
mockLSIFStore := NewMockLSIFStore()
|
||||
mockGitserverClient := NewMockGitserverClient()
|
||||
commitChecker := newCachedCommitChecker(mockGitserverClient)
|
||||
|
||||
resolver := newResolver(mockDBStore, mockLSIFStore, mockGitserverClient, nil, nil, nil, 50, &observation.TestContext, database.NewMockDB(), nil)
|
||||
dumps, err := resolver.findClosestDumps(context.Background(), commitChecker, 42, "deadbeef", "s1/main.go", true, "idx")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error finding closest dumps: %s", err)
|
||||
}
|
||||
if len(dumps) != 0 {
|
||||
t.Errorf("unexpected number of dumps. want=%d have=%d", 0, len(dumps))
|
||||
}
|
||||
|
||||
if value := len(mockDBStore.MarkRepositoryAsDirtyFunc.History()); value != 0 {
|
||||
t.Errorf("expected number of calls to store.MarkRepositoryAsDirty. want=%d have=%d", 0, value)
|
||||
}
|
||||
}
|
||||
@ -4,16 +4,15 @@ import (
|
||||
"context"
|
||||
|
||||
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
|
||||
"github.com/sourcegraph/sourcegraph/lib/errors"
|
||||
)
|
||||
|
||||
type DiagnosticResolver struct {
|
||||
diagnostic resolvers.AdjustedDiagnostic
|
||||
diagnostic AdjustedDiagnostic
|
||||
locationResolver *CachedLocationResolver
|
||||
}
|
||||
|
||||
func NewDiagnosticResolver(diagnostic resolvers.AdjustedDiagnostic, locationResolver *CachedLocationResolver) gql.DiagnosticResolver {
|
||||
func NewDiagnosticResolver(diagnostic AdjustedDiagnostic, locationResolver *CachedLocationResolver) gql.DiagnosticResolver {
|
||||
return &DiagnosticResolver{
|
||||
diagnostic: diagnostic,
|
||||
locationResolver: locationResolver,
|
||||
@ -29,7 +28,7 @@ func (r *DiagnosticResolver) Location(ctx context.Context) (gql.LocationResolver
|
||||
return resolveLocation(
|
||||
ctx,
|
||||
r.locationResolver,
|
||||
resolvers.AdjustedLocation{
|
||||
AdjustedLocation{
|
||||
Dump: r.diagnostic.Dump,
|
||||
Path: r.diagnostic.Path,
|
||||
AdjustedCommit: r.diagnostic.AdjustedCommit,
|
||||
|
||||
@ -5,16 +5,15 @@ import (
|
||||
|
||||
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil"
|
||||
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
|
||||
)
|
||||
|
||||
type DiagnosticConnectionResolver struct {
|
||||
diagnostics []resolvers.AdjustedDiagnostic
|
||||
diagnostics []AdjustedDiagnostic
|
||||
totalCount int
|
||||
locationResolver *CachedLocationResolver
|
||||
}
|
||||
|
||||
func NewDiagnosticConnectionResolver(diagnostics []resolvers.AdjustedDiagnostic, totalCount int, locationResolver *CachedLocationResolver) gql.DiagnosticConnectionResolver {
|
||||
func NewDiagnosticConnectionResolver(diagnostics []AdjustedDiagnostic, totalCount int, locationResolver *CachedLocationResolver) gql.DiagnosticConnectionResolver {
|
||||
return &DiagnosticConnectionResolver{
|
||||
diagnostics: diagnostics,
|
||||
totalCount: totalCount,
|
||||
|
||||
@ -5,16 +5,15 @@ import (
|
||||
|
||||
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil"
|
||||
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
|
||||
)
|
||||
|
||||
type LocationConnectionResolver struct {
|
||||
locations []resolvers.AdjustedLocation
|
||||
locations []AdjustedLocation
|
||||
cursor *string
|
||||
locationResolver *CachedLocationResolver
|
||||
}
|
||||
|
||||
func NewLocationConnectionResolver(locations []resolvers.AdjustedLocation, cursor *string, locationResolver *CachedLocationResolver) gql.LocationConnectionResolver {
|
||||
func NewLocationConnectionResolver(locations []AdjustedLocation, cursor *string, locationResolver *CachedLocationResolver) gql.LocationConnectionResolver {
|
||||
return &LocationConnectionResolver{
|
||||
locations: locations,
|
||||
cursor: cursor,
|
||||
|
||||
@ -8,7 +8,6 @@ import (
|
||||
|
||||
"github.com/sourcegraph/sourcegraph/cmd/frontend/backend"
|
||||
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
|
||||
"github.com/sourcegraph/sourcegraph/internal/api"
|
||||
"github.com/sourcegraph/sourcegraph/internal/database"
|
||||
"github.com/sourcegraph/sourcegraph/internal/errcode"
|
||||
@ -245,7 +244,7 @@ func (r *CachedLocationResolver) resolvePath(commitResolver *gql.GitCommitResolv
|
||||
// resolveLocations creates a slice of LocationResolvers for the given list of adjusted locations. The
|
||||
// resulting list may be smaller than the input list as any locations with a commit not known by
|
||||
// gitserver will be skipped.
|
||||
func resolveLocations(ctx context.Context, locationResolver *CachedLocationResolver, locations []resolvers.AdjustedLocation) ([]gql.LocationResolver, error) {
|
||||
func resolveLocations(ctx context.Context, locationResolver *CachedLocationResolver, locations []AdjustedLocation) ([]gql.LocationResolver, error) {
|
||||
resolvedLocations := make([]gql.LocationResolver, 0, len(locations))
|
||||
for i := range locations {
|
||||
resolver, err := resolveLocation(ctx, locationResolver, locations[i])
|
||||
@ -264,7 +263,7 @@ func resolveLocations(ctx context.Context, locationResolver *CachedLocationResol
|
||||
|
||||
// resolveLocation creates a LocationResolver for the given adjusted location. This function may return a
|
||||
// nil resolver if the location's commit is not known by gitserver.
|
||||
func resolveLocation(ctx context.Context, locationResolver *CachedLocationResolver, location resolvers.AdjustedLocation) (gql.LocationResolver, error) {
|
||||
func resolveLocation(ctx context.Context, locationResolver *CachedLocationResolver, location AdjustedLocation) (gql.LocationResolver, error) {
|
||||
treeResolver, err := locationResolver.Path(ctx, api.RepoID(location.Dump.RepositoryID), location.AdjustedCommit, location.Path)
|
||||
if err != nil || treeResolver == nil {
|
||||
return nil, err
|
||||
|
||||
@ -13,7 +13,6 @@ import (
|
||||
"github.com/sourcegraph/sourcegraph/cmd/frontend/backend"
|
||||
"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
|
||||
"github.com/sourcegraph/sourcegraph/internal/api"
|
||||
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/lsifstore"
|
||||
@ -24,10 +23,12 @@ import (
|
||||
"github.com/sourcegraph/sourcegraph/lib/errors"
|
||||
)
|
||||
|
||||
const numRoutines = 5
|
||||
const numRepositories = 10
|
||||
const numCommits = 10 // per repo
|
||||
const numPaths = 10 // per commit
|
||||
const (
|
||||
numRoutines = 5
|
||||
numRepositories = 10
|
||||
numCommits = 10 // per repo
|
||||
numPaths = 10 // per commit
|
||||
)
|
||||
|
||||
func TestCachedLocationResolver(t *testing.T) {
|
||||
repos := database.NewStrictMockRepoStore()
|
||||
@ -256,7 +257,7 @@ func TestResolveLocations(t *testing.T) {
|
||||
r3 := lsifstore.Range{Start: lsifstore.Position{Line: 31, Character: 32}, End: lsifstore.Position{Line: 33, Character: 34}}
|
||||
r4 := lsifstore.Range{Start: lsifstore.Position{Line: 41, Character: 42}, End: lsifstore.Position{Line: 43, Character: 44}}
|
||||
|
||||
locations, err := resolveLocations(context.Background(), NewCachedLocationResolver(db), []resolvers.AdjustedLocation{
|
||||
locations, err := resolveLocations(context.Background(), NewCachedLocationResolver(db), []AdjustedLocation{
|
||||
{Dump: store.Dump{RepositoryID: 50}, AdjustedCommit: "deadbeef1", AdjustedRange: r1, Path: "p1"},
|
||||
{Dump: store.Dump{RepositoryID: 51}, AdjustedCommit: "deadbeef2", AdjustedRange: r2, Path: "p2"},
|
||||
{Dump: store.Dump{RepositoryID: 52}, AdjustedCommit: "deadbeef3", AdjustedRange: r3, Path: "p3"},
|
||||
|
||||
@ -9,6 +9,9 @@ import (
|
||||
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil"
|
||||
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/transport/graphql"
|
||||
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/lsifstore"
|
||||
"github.com/sourcegraph/sourcegraph/internal/observation"
|
||||
"github.com/sourcegraph/sourcegraph/lib/errors"
|
||||
)
|
||||
@ -33,23 +36,23 @@ var ErrIllegalBounds = errors.New("illegal bounds")
|
||||
// All code intel-specific behavior is delegated to the underlying resolver instance, which is defined
|
||||
// in the parent package.
|
||||
type QueryResolver struct {
|
||||
queryResolver resolvers.QueryResolver
|
||||
resolver resolvers.Resolver
|
||||
gitserver GitserverClient
|
||||
locationResolver *CachedLocationResolver
|
||||
errTracer *observation.ErrCollector
|
||||
gitBlobLSIFDataResolver graphql.GitBlobLSIFDataResolver
|
||||
resolver resolvers.Resolver
|
||||
gitserver GitserverClient
|
||||
locationResolver *CachedLocationResolver
|
||||
errTracer *observation.ErrCollector
|
||||
}
|
||||
|
||||
// NewQueryResolver creates a new QueryResolver with the given resolver that defines all code intel-specific
|
||||
// behavior. A cached location resolver instance is also given to the query resolver, which should be used
|
||||
// to resolve all location-related values.
|
||||
func NewQueryResolver(gitserver GitserverClient, queryResolver resolvers.QueryResolver, resolver resolvers.Resolver, locationResolver *CachedLocationResolver, errTracer *observation.ErrCollector) gql.GitBlobLSIFDataResolver {
|
||||
func NewQueryResolver(gitserver GitserverClient, gitBlobResolver graphql.GitBlobLSIFDataResolver, resolver resolvers.Resolver, locationResolver *CachedLocationResolver, errTracer *observation.ErrCollector) gql.GitBlobLSIFDataResolver {
|
||||
return &QueryResolver{
|
||||
queryResolver: queryResolver,
|
||||
resolver: resolver,
|
||||
gitserver: gitserver,
|
||||
locationResolver: locationResolver,
|
||||
errTracer: errTracer,
|
||||
gitBlobLSIFDataResolver: gitBlobResolver,
|
||||
resolver: resolver,
|
||||
gitserver: gitserver,
|
||||
locationResolver: locationResolver,
|
||||
errTracer: errTracer,
|
||||
}
|
||||
}
|
||||
|
||||
@ -59,13 +62,18 @@ func (r *QueryResolver) ToGitBlobLSIFData() (gql.GitBlobLSIFDataResolver, bool)
|
||||
func (r *QueryResolver) Stencil(ctx context.Context) (_ []gql.RangeResolver, err error) {
|
||||
defer r.errTracer.Collect(&err, log.String("queryResolver.field", "stencil"))
|
||||
|
||||
ranges, err := r.queryResolver.Stencil(ctx)
|
||||
ranges, err := r.gitBlobLSIFDataResolver.Stencil(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resolvers := make([]gql.RangeResolver, 0, len(ranges))
|
||||
var adjustedRanges []lsifstore.Range
|
||||
for _, r := range ranges {
|
||||
adjustedRanges = append(adjustedRanges, sharedRangeTolsifstoreRange(r))
|
||||
}
|
||||
|
||||
resolvers := make([]gql.RangeResolver, 0, len(adjustedRanges))
|
||||
for _, r := range adjustedRanges {
|
||||
resolvers = append(resolvers, gql.NewRangeResolver(convertRange(r)))
|
||||
}
|
||||
|
||||
@ -79,13 +87,13 @@ func (r *QueryResolver) Ranges(ctx context.Context, args *gql.LSIFRangesArgs) (_
|
||||
return nil, ErrIllegalBounds
|
||||
}
|
||||
|
||||
ranges, err := r.queryResolver.Ranges(ctx, int(args.StartLine), int(args.EndLine))
|
||||
ranges, err := r.gitBlobLSIFDataResolver.Ranges(ctx, int(args.StartLine), int(args.EndLine))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &CodeIntelligenceRangeConnectionResolver{
|
||||
ranges: ranges,
|
||||
ranges: sharedRangeToAdjustedRange(ranges),
|
||||
locationResolver: r.locationResolver,
|
||||
}, nil
|
||||
}
|
||||
@ -93,7 +101,7 @@ func (r *QueryResolver) Ranges(ctx context.Context, args *gql.LSIFRangesArgs) (_
|
||||
func (r *QueryResolver) Definitions(ctx context.Context, args *gql.LSIFQueryPositionArgs) (_ gql.LocationConnectionResolver, err error) {
|
||||
defer r.errTracer.Collect(&err, log.String("queryResolver.field", "definitions"))
|
||||
|
||||
locations, err := r.queryResolver.Definitions(ctx, int(args.Line), int(args.Character))
|
||||
locations, err := r.gitBlobLSIFDataResolver.Definitions(ctx, int(args.Line), int(args.Character))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -108,7 +116,9 @@ func (r *QueryResolver) Definitions(ctx context.Context, args *gql.LSIFQueryPosi
|
||||
locations = filtered
|
||||
}
|
||||
|
||||
return NewLocationConnectionResolver(locations, nil, r.locationResolver), nil
|
||||
lct := uploadLocationToAdjustedLocations(locations)
|
||||
|
||||
return NewLocationConnectionResolver(lct, nil, r.locationResolver), nil
|
||||
}
|
||||
|
||||
func (r *QueryResolver) References(ctx context.Context, args *gql.LSIFPagedQueryPositionArgs) (_ gql.LocationConnectionResolver, err error) {
|
||||
@ -124,7 +134,7 @@ func (r *QueryResolver) References(ctx context.Context, args *gql.LSIFPagedQuery
|
||||
return nil, err
|
||||
}
|
||||
|
||||
locations, cursor, err := r.queryResolver.References(ctx, int(args.Line), int(args.Character), limit, cursor)
|
||||
locations, cursor, err := r.gitBlobLSIFDataResolver.References(ctx, int(args.Line), int(args.Character), limit, cursor)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -139,7 +149,9 @@ func (r *QueryResolver) References(ctx context.Context, args *gql.LSIFPagedQuery
|
||||
locations = filtered
|
||||
}
|
||||
|
||||
return NewLocationConnectionResolver(locations, strPtr(cursor), r.locationResolver), nil
|
||||
lct := uploadLocationToAdjustedLocations(locations)
|
||||
|
||||
return NewLocationConnectionResolver(lct, strPtr(cursor), r.locationResolver), nil
|
||||
}
|
||||
|
||||
func (r *QueryResolver) Implementations(ctx context.Context, args *gql.LSIFPagedQueryPositionArgs) (_ gql.LocationConnectionResolver, err error) {
|
||||
@ -155,7 +167,7 @@ func (r *QueryResolver) Implementations(ctx context.Context, args *gql.LSIFPaged
|
||||
return nil, err
|
||||
}
|
||||
|
||||
locations, cursor, err := r.queryResolver.Implementations(ctx, int(args.Line), int(args.Character), limit, cursor)
|
||||
locations, cursor, err := r.gitBlobLSIFDataResolver.Implementations(ctx, int(args.Line), int(args.Character), limit, cursor)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -170,32 +182,39 @@ func (r *QueryResolver) Implementations(ctx context.Context, args *gql.LSIFPaged
|
||||
locations = filtered
|
||||
}
|
||||
|
||||
return NewLocationConnectionResolver(locations, strPtr(cursor), r.locationResolver), nil
|
||||
lct := uploadLocationToAdjustedLocations(locations)
|
||||
|
||||
return NewLocationConnectionResolver(lct, strPtr(cursor), r.locationResolver), nil
|
||||
}
|
||||
|
||||
func (r *QueryResolver) Hover(ctx context.Context, args *gql.LSIFQueryPositionArgs) (_ gql.HoverResolver, err error) {
|
||||
defer r.errTracer.Collect(&err, log.String("queryResolver.field", "hover"))
|
||||
|
||||
text, rx, exists, err := r.queryResolver.Hover(ctx, int(args.Line), int(args.Character))
|
||||
text, rx, exists, err := r.gitBlobLSIFDataResolver.Hover(ctx, int(args.Line), int(args.Character))
|
||||
if err != nil || !exists {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return NewHoverResolver(text, convertRange(rx)), nil
|
||||
return NewHoverResolver(text, sharedRangeTolspRange(rx)), nil
|
||||
}
|
||||
|
||||
func (r *QueryResolver) LSIFUploads(ctx context.Context) (_ []gql.LSIFUploadResolver, err error) {
|
||||
defer r.errTracer.Collect(&err, log.String("queryResolver.field", "lsifUploads"))
|
||||
|
||||
uploads, err := r.queryResolver.LSIFUploads(ctx)
|
||||
uploads, err := r.gitBlobLSIFDataResolver.LSIFUploads(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dbUploads := []store.Upload{}
|
||||
for _, u := range uploads {
|
||||
dbUploads = append(dbUploads, sharedDumpToDbstoreUpload(u))
|
||||
}
|
||||
|
||||
prefetcher := NewPrefetcher(r.resolver)
|
||||
|
||||
resolvers := make([]gql.LSIFUploadResolver, 0, len(uploads))
|
||||
for _, upload := range uploads {
|
||||
resolvers := make([]gql.LSIFUploadResolver, 0, len(dbUploads))
|
||||
for _, upload := range dbUploads {
|
||||
resolvers = append(resolvers, NewUploadResolver(r.locationResolver.db, r.gitserver, r.resolver, upload, prefetcher, r.locationResolver, r.errTracer))
|
||||
}
|
||||
|
||||
@ -210,10 +229,12 @@ func (r *QueryResolver) Diagnostics(ctx context.Context, args *gql.LSIFDiagnosti
|
||||
return nil, ErrIllegalLimit
|
||||
}
|
||||
|
||||
diagnostics, totalCount, err := r.queryResolver.Diagnostics(ctx, limit)
|
||||
diagnostics, totalCount, err := r.gitBlobLSIFDataResolver.Diagnostics(ctx, limit)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return NewDiagnosticConnectionResolver(diagnostics, totalCount, r.locationResolver), nil
|
||||
adjustedDiag := sharedDiagnosticAtUploadToAdjustedDiagnostic(diagnostics)
|
||||
|
||||
return NewDiagnosticConnectionResolver(adjustedDiag, totalCount, r.locationResolver), nil
|
||||
}
|
||||
|
||||
@ -10,7 +10,8 @@ import (
|
||||
gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
|
||||
"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil"
|
||||
resolvermocks "github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers/mocks"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/lsifstore"
|
||||
transportmocks "github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers/mocks/transport"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
|
||||
"github.com/sourcegraph/sourcegraph/internal/database"
|
||||
"github.com/sourcegraph/sourcegraph/internal/observation"
|
||||
)
|
||||
@ -19,22 +20,21 @@ func TestRanges(t *testing.T) {
|
||||
logger := logtest.Scoped(t)
|
||||
db := database.NewDB(logger, nil)
|
||||
|
||||
mockQueryResolver := resolvermocks.NewMockQueryResolver()
|
||||
mockResolver := resolvermocks.NewMockResolver()
|
||||
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), nil)
|
||||
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
|
||||
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), nil)
|
||||
|
||||
args := &gql.LSIFRangesArgs{StartLine: 10, EndLine: 20}
|
||||
if _, err := resolver.Ranges(context.Background(), args); err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
if len(mockQueryResolver.RangesFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockQueryResolver.RangesFunc.History()))
|
||||
if len(mockGitBlobResolver.RangesFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockGitBlobResolver.RangesFunc.History()))
|
||||
}
|
||||
if val := mockQueryResolver.RangesFunc.History()[0].Arg1; val != 10 {
|
||||
if val := mockGitBlobResolver.RangesFunc.History()[0].Arg1; val != 10 {
|
||||
t.Fatalf("unexpected start line. want=%d have=%d", 10, val)
|
||||
}
|
||||
if val := mockQueryResolver.RangesFunc.History()[0].Arg2; val != 20 {
|
||||
if val := mockGitBlobResolver.RangesFunc.History()[0].Arg2; val != 20 {
|
||||
t.Fatalf("unexpected end line. want=%d have=%d", 20, val)
|
||||
}
|
||||
}
|
||||
@ -43,22 +43,21 @@ func TestDefinitions(t *testing.T) {
|
||||
logger := logtest.Scoped(t)
|
||||
db := database.NewDB(logger, nil)
|
||||
|
||||
mockQueryResolver := resolvermocks.NewMockQueryResolver()
|
||||
mockResolver := resolvermocks.NewMockResolver()
|
||||
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), nil)
|
||||
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
|
||||
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), nil)
|
||||
|
||||
args := &gql.LSIFQueryPositionArgs{Line: 10, Character: 15}
|
||||
if _, err := resolver.Definitions(context.Background(), args); err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
if len(mockQueryResolver.DefinitionsFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockQueryResolver.DefinitionsFunc.History()))
|
||||
if len(mockGitBlobResolver.DefinitionsFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockGitBlobResolver.DefinitionsFunc.History()))
|
||||
}
|
||||
if val := mockQueryResolver.DefinitionsFunc.History()[0].Arg1; val != 10 {
|
||||
if val := mockGitBlobResolver.DefinitionsFunc.History()[0].Arg1; val != 10 {
|
||||
t.Fatalf("unexpected line. want=%d have=%d", 10, val)
|
||||
}
|
||||
if val := mockQueryResolver.DefinitionsFunc.History()[0].Arg2; val != 15 {
|
||||
if val := mockGitBlobResolver.DefinitionsFunc.History()[0].Arg2; val != 15 {
|
||||
t.Fatalf("unexpected character. want=%d have=%d", 15, val)
|
||||
}
|
||||
}
|
||||
@ -67,9 +66,8 @@ func TestReferences(t *testing.T) {
|
||||
logger := logtest.Scoped(t)
|
||||
db := database.NewDB(logger, nil)
|
||||
|
||||
mockQueryResolver := resolvermocks.NewMockQueryResolver()
|
||||
mockResolver := resolvermocks.NewMockResolver()
|
||||
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), nil)
|
||||
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
|
||||
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), nil)
|
||||
|
||||
offset := int32(25)
|
||||
cursor := base64.StdEncoding.EncodeToString([]byte("test-cursor"))
|
||||
@ -87,19 +85,19 @@ func TestReferences(t *testing.T) {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
if len(mockQueryResolver.ReferencesFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockQueryResolver.ReferencesFunc.History()))
|
||||
if len(mockGitBlobResolver.ReferencesFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockGitBlobResolver.ReferencesFunc.History()))
|
||||
}
|
||||
if val := mockQueryResolver.ReferencesFunc.History()[0].Arg1; val != 10 {
|
||||
if val := mockGitBlobResolver.ReferencesFunc.History()[0].Arg1; val != 10 {
|
||||
t.Fatalf("unexpected line. want=%d have=%d", 10, val)
|
||||
}
|
||||
if val := mockQueryResolver.ReferencesFunc.History()[0].Arg2; val != 15 {
|
||||
if val := mockGitBlobResolver.ReferencesFunc.History()[0].Arg2; val != 15 {
|
||||
t.Fatalf("unexpected character. want=%d have=%d", 15, val)
|
||||
}
|
||||
if val := mockQueryResolver.ReferencesFunc.History()[0].Arg3; val != 25 {
|
||||
if val := mockGitBlobResolver.ReferencesFunc.History()[0].Arg3; val != 25 {
|
||||
t.Fatalf("unexpected character. want=%d have=%d", 25, val)
|
||||
}
|
||||
if val := mockQueryResolver.ReferencesFunc.History()[0].Arg4; val != "test-cursor" {
|
||||
if val := mockGitBlobResolver.ReferencesFunc.History()[0].Arg4; val != "test-cursor" {
|
||||
t.Fatalf("unexpected character. want=%s have=%s", "test-cursor", val)
|
||||
}
|
||||
}
|
||||
@ -108,9 +106,8 @@ func TestReferencesDefaultLimit(t *testing.T) {
|
||||
logger := logtest.Scoped(t)
|
||||
db := database.NewDB(logger, nil)
|
||||
|
||||
mockQueryResolver := resolvermocks.NewMockQueryResolver()
|
||||
mockResolver := resolvermocks.NewMockResolver()
|
||||
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), nil)
|
||||
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
|
||||
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), nil)
|
||||
|
||||
args := &gql.LSIFPagedQueryPositionArgs{
|
||||
LSIFQueryPositionArgs: gql.LSIFQueryPositionArgs{
|
||||
@ -124,10 +121,10 @@ func TestReferencesDefaultLimit(t *testing.T) {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
if len(mockQueryResolver.ReferencesFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockQueryResolver.DiagnosticsFunc.History()))
|
||||
if len(mockGitBlobResolver.ReferencesFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockGitBlobResolver.DiagnosticsFunc.History()))
|
||||
}
|
||||
if val := mockQueryResolver.ReferencesFunc.History()[0].Arg3; val != DefaultReferencesPageSize {
|
||||
if val := mockGitBlobResolver.ReferencesFunc.History()[0].Arg3; val != DefaultReferencesPageSize {
|
||||
t.Fatalf("unexpected limit. want=%d have=%d", DefaultReferencesPageSize, val)
|
||||
}
|
||||
}
|
||||
@ -136,9 +133,8 @@ func TestReferencesDefaultIllegalLimit(t *testing.T) {
|
||||
logger := logtest.Scoped(t)
|
||||
db := database.NewDB(logger, nil)
|
||||
|
||||
mockQueryResolver := resolvermocks.NewMockQueryResolver()
|
||||
mockResolver := resolvermocks.NewMockResolver()
|
||||
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), observation.NewErrorCollector())
|
||||
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
|
||||
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), observation.NewErrorCollector())
|
||||
|
||||
offset := int32(-1)
|
||||
args := &gql.LSIFPagedQueryPositionArgs{
|
||||
@ -158,23 +154,23 @@ func TestHover(t *testing.T) {
|
||||
logger := logtest.Scoped(t)
|
||||
db := database.NewDB(logger, nil)
|
||||
|
||||
mockQueryResolver := resolvermocks.NewMockQueryResolver()
|
||||
mockQueryResolver.HoverFunc.SetDefaultReturn("text", lsifstore.Range{}, true, nil)
|
||||
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
|
||||
mockGitBlobResolver.HoverFunc.SetDefaultReturn("text", shared.Range{}, true, nil)
|
||||
mockResolver := resolvermocks.NewMockResolver()
|
||||
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), nil)
|
||||
resolver := NewQueryResolver(nil, mockGitBlobResolver, mockResolver, NewCachedLocationResolver(db), nil)
|
||||
|
||||
args := &gql.LSIFQueryPositionArgs{Line: 10, Character: 15}
|
||||
if _, err := resolver.Hover(context.Background(), args); err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
if len(mockQueryResolver.HoverFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockQueryResolver.HoverFunc.History()))
|
||||
if len(mockGitBlobResolver.HoverFunc.History()) != 1 {
|
||||
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockGitBlobResolver.HoverFunc.History()))
|
||||
}
|
||||
if val := mockQueryResolver.HoverFunc.History()[0].Arg1; val != 10 {
|
||||
if val := mockGitBlobResolver.HoverFunc.History()[0].Arg1; val != 10 {
|
||||
t.Fatalf("unexpected line. want=%d have=%d", 10, val)
|
||||
}
|
||||
if val := mockQueryResolver.HoverFunc.History()[0].Arg2; val != 15 {
if val := mockGitBlobResolver.HoverFunc.History()[0].Arg2; val != 15 {
t.Fatalf("unexpected character. want=%d have=%d", 15, val)
}
}
@ -183,9 +179,8 @@ func TestDiagnostics(t *testing.T) {
logger := logtest.Scoped(t)
db := database.NewDB(logger, nil)

mockQueryResolver := resolvermocks.NewMockQueryResolver()
mockResolver := resolvermocks.NewMockResolver()
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), nil)
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), nil)

offset := int32(25)
args := &gql.LSIFDiagnosticsArgs{
@ -196,10 +191,10 @@ func TestDiagnostics(t *testing.T) {
t.Fatalf("unexpected error: %s", err)
}

if len(mockQueryResolver.DiagnosticsFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockQueryResolver.DiagnosticsFunc.History()))
if len(mockGitBlobResolver.DiagnosticsFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockGitBlobResolver.DiagnosticsFunc.History()))
}
if val := mockQueryResolver.DiagnosticsFunc.History()[0].Arg1; val != 25 {
if val := mockGitBlobResolver.DiagnosticsFunc.History()[0].Arg1; val != 25 {
t.Fatalf("unexpected limit. want=%d have=%d", 25, val)
}
}
@ -208,9 +203,8 @@ func TestDiagnosticsDefaultLimit(t *testing.T) {
logger := logtest.Scoped(t)
db := database.NewDB(logger, nil)

mockQueryResolver := resolvermocks.NewMockQueryResolver()
mockResolver := resolvermocks.NewMockResolver()
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), nil)
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), nil)

args := &gql.LSIFDiagnosticsArgs{
ConnectionArgs: graphqlutil.ConnectionArgs{},
@ -220,10 +214,10 @@ func TestDiagnosticsDefaultLimit(t *testing.T) {
t.Fatalf("unexpected error: %s", err)
}

if len(mockQueryResolver.DiagnosticsFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockQueryResolver.DiagnosticsFunc.History()))
if len(mockGitBlobResolver.DiagnosticsFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockGitBlobResolver.DiagnosticsFunc.History()))
}
if val := mockQueryResolver.DiagnosticsFunc.History()[0].Arg1; val != DefaultDiagnosticsPageSize {
if val := mockGitBlobResolver.DiagnosticsFunc.History()[0].Arg1; val != DefaultDiagnosticsPageSize {
t.Fatalf("unexpected limit. want=%d have=%d", DefaultDiagnosticsPageSize, val)
}
}
@ -232,9 +226,8 @@ func TestDiagnosticsDefaultIllegalLimit(t *testing.T) {
logger := logtest.Scoped(t)
db := database.NewDB(logger, nil)

mockQueryResolver := resolvermocks.NewMockQueryResolver()
mockResolver := resolvermocks.NewMockResolver()
resolver := NewQueryResolver(nil, mockQueryResolver, mockResolver, NewCachedLocationResolver(db), observation.NewErrorCollector())
mockGitBlobResolver := transportmocks.NewMockGitBlobLSIFDataResolver()
resolver := NewQueryResolver(nil, mockGitBlobResolver, nil, NewCachedLocationResolver(db), observation.NewErrorCollector())

offset := int32(-1)
args := &gql.LSIFDiagnosticsArgs{
@ -4,11 +4,10 @@ import (
"context"

gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
)

type CodeIntelligenceRangeResolver struct {
r resolvers.AdjustedCodeIntelligenceRange
r AdjustedCodeIntelligenceRange
locationResolver *CachedLocationResolver
}


@ -4,11 +4,10 @@ import (
"context"

gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
"github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers"
)

type CodeIntelligenceRangeConnectionResolver struct {
ranges []resolvers.AdjustedCodeIntelligenceRange
ranges []AdjustedCodeIntelligenceRange
locationResolver *CachedLocationResolver
}


@ -78,6 +78,10 @@ func (r *Resolver) ExecutorResolver() executor.Resolver {
return r.resolver.ExecutorResolver()
}

func (r *Resolver) CodeNavResolver() resolvers.CodeNavResolver {
return r.resolver.CodeNavResolver()
}

// 🚨 SECURITY: dbstore layer handles authz for GetUploadByID
func (r *Resolver) LSIFUploadByID(ctx context.Context, id graphql.ID) (_ gql.LSIFUploadResolver, err error) {
ctx, traceErrs, endObservation := r.observationContext.lsifUploadByID.WithErrors(ctx, &err, observation.Args{LogFields: []log.Field{
@ -310,12 +314,13 @@ func (r *Resolver) GitBlobLSIFData(ctx context.Context, args *gql.GitBlobLSIFDat
ctx, errTracer, endObservation := r.observationContext.gitBlobLsifData.WithErrors(ctx, &err, observation.Args{})
endObservation.OnCancel(ctx, 1, observation.Args{})

resolver, err := r.resolver.QueryResolver(ctx, args)
if err != nil || resolver == nil {
codenav := r.resolver.CodeNavResolver()
gitBlobResolver, err := codenav.GitBlobLSIFDataResolverFactory(ctx, args.Repo, string(args.Commit), args.Path, args.ToolName, args.ExactPath)
if err != nil || gitBlobResolver == nil {
return nil, err
}

return NewQueryResolver(r.gitserver, resolver, r.resolver, r.locationResolver, errTracer), nil
return NewQueryResolver(r.gitserver, gitBlobResolver, r.resolver, r.locationResolver, errTracer), nil
}

func (r *Resolver) GitBlobCodeIntelInfo(ctx context.Context, args *gql.GitTreeEntryCodeIntelInfoArgs) (_ gql.GitBlobCodeIntelSupportResolver, err error) {
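The hunk above is the heart of this change: GitBlobLSIFData no longer asks the frontend resolver to build a QueryResolver from dumps, it asks the codenav transport layer for a GitBlobLSIFDataResolver and returns nil when no upload covers the blob. A minimal, self-contained sketch of that delegation shape, using illustrative stand-in types rather than the real Sourcegraph packages:

package main

import (
	"context"
	"fmt"
)

// gitBlobResolver stands in for the transport-layer GitBlobLSIFDataResolver.
type gitBlobResolver interface {
	Diagnostics(ctx context.Context, limit int) (int, error)
}

// resolverFactory stands in for CodeNavResolver.GitBlobLSIFDataResolverFactory:
// it returns a nil resolver (and no error) when no dump can answer queries for
// the requested repo/commit/path.
type resolverFactory func(ctx context.Context, repo, commit, path string) (gitBlobResolver, error)

func gitBlobLSIFData(ctx context.Context, factory resolverFactory, repo, commit, path string) (gitBlobResolver, error) {
	r, err := factory(ctx, repo, commit, path)
	if err != nil || r == nil {
		// Mirrors the nil/err short-circuit in the hunk above.
		return nil, err
	}
	return r, nil
}

func main() {
	noData := func(ctx context.Context, repo, commit, path string) (gitBlobResolver, error) { return nil, nil }
	r, err := gitBlobLSIFData(context.Background(), noData, "repo", "deadbeef", "main.go")
	fmt.Println(r == nil, err == nil) // true true: no code intelligence for this blob
}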
@ -1,4 +1,4 @@
package resolvers
package graphql

import (
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
@ -3,7 +3,10 @@ package graphql
import (
"github.com/sourcegraph/go-lsp"

"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/lsifstore"
"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
)

// strPtr creates a pointer to the given value. If the value is an
@ -72,3 +75,162 @@ func convertRange(r lsifstore.Range) lsp.Range {
func convertPosition(line, character int) lsp.Position {
return lsp.Position{Line: line, Character: character}
}
func sharedRangeTolsifstoreRange(r shared.Range) lsifstore.Range {
return lsifstore.Range{
Start: lsifstore.Position(r.Start),
End: lsifstore.Position(r.End),
}
}

func sharedRangeTolspRange(r shared.Range) lsp.Range {
return lsp.Range{Start: convertPosition(r.Start.Line, r.Start.Character), End: convertPosition(r.End.Line, r.End.Character)}
}

func sharedRangeToAdjustedRange(rng []shared.AdjustedCodeIntelligenceRange) []AdjustedCodeIntelligenceRange {
adjustedRange := make([]AdjustedCodeIntelligenceRange, 0, len(rng))
for _, r := range rng {

definitions := make([]AdjustedLocation, 0, len(r.Definitions))
for _, d := range r.Definitions {
def := AdjustedLocation{
Dump: store.Dump(d.Dump),
Path: d.Path,
AdjustedCommit: d.TargetCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(d.TargetRange.Start),
End: lsifstore.Position(d.TargetRange.End),
},
}
definitions = append(definitions, def)
}

references := make([]AdjustedLocation, 0, len(r.References))
for _, d := range r.References {
ref := AdjustedLocation{
Dump: store.Dump(d.Dump),
Path: d.Path,
AdjustedCommit: d.TargetCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(d.TargetRange.Start),
End: lsifstore.Position(d.TargetRange.End),
},
}
references = append(references, ref)
}

implementations := make([]AdjustedLocation, 0, len(r.Implementations))
for _, d := range r.Implementations {
impl := AdjustedLocation{
Dump: store.Dump(d.Dump),
Path: d.Path,
AdjustedCommit: d.TargetCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(d.TargetRange.Start),
End: lsifstore.Position(d.TargetRange.End),
},
}
implementations = append(implementations, impl)
}

adj := AdjustedCodeIntelligenceRange{
Range: lsifstore.Range{
Start: lsifstore.Position(r.Range.Start),
End: lsifstore.Position(r.Range.End),
},
Definitions: definitions,
References: references,
Implementations: implementations,
HoverText: r.HoverText,
}

adjustedRange = append(adjustedRange, adj)
}

return adjustedRange
}

func uploadLocationToAdjustedLocations(location []shared.UploadLocation) []AdjustedLocation {
uploadLocation := make([]AdjustedLocation, 0, len(location))
for _, loc := range location {
dump := store.Dump(loc.Dump)
adjustedRange := lsifstore.Range{
Start: lsifstore.Position{
Line: loc.TargetRange.Start.Line,
Character: loc.TargetRange.Start.Character,
},
End: lsifstore.Position{
Line: loc.TargetRange.End.Line,
Character: loc.TargetRange.End.Character,
},
}

uploadLocation = append(uploadLocation, AdjustedLocation{
Dump: dump,
Path: loc.Path,
AdjustedCommit: loc.TargetCommit,
AdjustedRange: adjustedRange,
})
}

return uploadLocation
}

func sharedDumpToDbstoreUpload(dump shared.Dump) store.Upload {
return store.Upload{
ID: dump.ID,
Commit: dump.Commit,
Root: dump.Root,
VisibleAtTip: dump.VisibleAtTip,
UploadedAt: dump.UploadedAt,
State: dump.State,
FailureMessage: dump.FailureMessage,
StartedAt: dump.StartedAt,
FinishedAt: dump.FinishedAt,
ProcessAfter: dump.ProcessAfter,
NumResets: dump.NumResets,
NumFailures: dump.NumFailures,
RepositoryID: dump.RepositoryID,
RepositoryName: dump.RepositoryName,
Indexer: dump.Indexer,
IndexerVersion: dump.IndexerVersion,
NumParts: 0,
UploadedParts: []int{},
UploadSize: nil,
Rank: nil,
AssociatedIndexID: dump.AssociatedIndexID,
}
}

func sharedDiagnosticAtUploadToAdjustedDiagnostic(shared []shared.DiagnosticAtUpload) []AdjustedDiagnostic {
adjustedDiagnostics := make([]AdjustedDiagnostic, 0, len(shared))
for _, diag := range shared {
diagnosticData := precise.DiagnosticData{
Severity: diag.Severity,
Code: diag.Code,
Message: diag.Message,
Source: diag.Source,
StartLine: diag.StartLine,
StartCharacter: diag.StartCharacter,
EndLine: diag.EndLine,
EndCharacter: diag.EndCharacter,
}
lsifDiag := lsifstore.Diagnostic{
DiagnosticData: diagnosticData,
DumpID: diag.DumpID,
Path: diag.Path,
}

adjusted := AdjustedDiagnostic{
Diagnostic: lsifDiag,
Dump: store.Dump(diag.Dump),
AdjustedCommit: diag.AdjustedCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(diag.AdjustedRange.Start),
End: lsifstore.Position(diag.AdjustedRange.End),
},
}
adjustedDiagnostics = append(adjustedDiagnostics, adjusted)
}
return adjustedDiagnostics
}
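A note on the conversion helpers above: calls such as lsifstore.Position(r.Start) and store.Dump(d.Dump) are plain Go type conversions, which compile only because the shared types and the store/lsifstore types declare identical field sets. A self-contained sketch of that pattern under the same assumption (the type names are illustrative, not the real packages):

package main

import "fmt"

// Two position types with identical underlying structs, standing in for
// shared.Position and lsifstore.Position.
type sharedPosition struct{ Line, Character int }
type lsifstorePosition struct{ Line, Character int }

func main() {
	p := sharedPosition{Line: 4, Character: 2}
	// Direct conversion: no per-field copying is needed as long as the two
	// struct types keep the same fields in the same order.
	q := lsifstorePosition(p)
	fmt.Printf("%+v\n", q) // {Line:4 Character:2}
}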
@ -8,12 +8,12 @@ import (

"github.com/sourcegraph/sourcegraph/internal/api"
"github.com/sourcegraph/sourcegraph/internal/codeintel/autoindexing"
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav"
codenavShared "github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
codenavgraphql "github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/transport/graphql"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
gs "github.com/sourcegraph/sourcegraph/internal/gitserver"
"github.com/sourcegraph/sourcegraph/internal/gitserver/gitdomain"
"github.com/sourcegraph/sourcegraph/internal/types"
"github.com/sourcegraph/sourcegraph/lib/codeintel/autoindex/config"
)

@ -75,16 +75,7 @@ type IndexEnqueuer interface {
}

type CodeNavResolver interface {
Definitions(ctx context.Context, args codenavShared.RequestArgs, requestState codenav.RequestState) (_ []codenavShared.UploadLocation, err error)
Diagnostics(ctx context.Context, args codenavShared.RequestArgs, requestState codenav.RequestState) (diagnosticsAtUploads []codenavShared.DiagnosticAtUpload, _ int, err error)
Hover(ctx context.Context, args codenavShared.RequestArgs, requestState codenav.RequestState) (string, codenavShared.Range, bool, error)
Implementations(ctx context.Context, args codenavShared.RequestArgs, requestState codenav.RequestState) (_ []codenavShared.UploadLocation, _ string, err error)
LSIFUploads(ctx context.Context, requestState codenav.RequestState) (uploads []codenavShared.Dump, err error)
Ranges(ctx context.Context, args codenavShared.RequestArgs, requestState codenav.RequestState, startLine, endLine int) (adjustedRanges []codenavShared.AdjustedCodeIntelligenceRange, err error)
References(ctx context.Context, args codenavShared.RequestArgs, requestState codenav.RequestState) (_ []codenavShared.UploadLocation, _ string, err error)
Stencil(ctx context.Context, args codenavShared.RequestArgs, requestState codenav.RequestState) (adjustedRanges []codenavShared.Range, err error)

GetHunkCacheSize() int
GitBlobLSIFDataResolverFactory(ctx context.Context, repo *types.Repo, commit, path, toolName string, exactPath bool) (_ codenavgraphql.GitBlobLSIFDataResolver, err error)
}

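For orientation, this is roughly how a transport-layer caller drives an interface shaped like CodeNavResolver: build the request arguments once and thread a per-request state value through every call. The types below are simplified stand-ins for codenavShared.RequestArgs and codenav.RequestState, not the real packages:

package main

import (
	"context"
	"fmt"
)

type requestArgs struct {
	RepositoryID    int
	Commit, Path    string
	Line, Character int
}

type requestState struct{ hunkCacheSize int }

// hoverer captures just the Hover shape from the interface above.
type hoverer interface {
	Hover(ctx context.Context, args requestArgs, state requestState) (string, bool, error)
}

type fakeCodeNav struct{}

func (fakeCodeNav) Hover(ctx context.Context, args requestArgs, state requestState) (string, bool, error) {
	return fmt.Sprintf("hover for %s:%d:%d", args.Path, args.Line, args.Character), true, nil
}

func main() {
	var svc hoverer = fakeCodeNav{}
	args := requestArgs{RepositoryID: 50, Commit: "deadbeef", Path: "main.go", Line: 10, Character: 3}
	if text, ok, err := svc.Hover(context.Background(), args, requestState{hunkCacheSize: 100}); err == nil && ok {
		fmt.Println(text)
	}
}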
type (

File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,80 +0,0 @@
package resolvers

import (
"context"
"fmt"
"time"

"github.com/sourcegraph/log"

"github.com/sourcegraph/sourcegraph/internal/metrics"
"github.com/sourcegraph/sourcegraph/internal/observation"
)

type operations struct {
queryResolver *observation.Operation
findClosestDumps *observation.Operation
}

func newOperations(observationContext *observation.Context) *operations {
metrics := metrics.NewREDMetrics(
observationContext.Registerer,
"codeintel_resolvers",
metrics.WithLabels("op"),
metrics.WithCountHelp("Total number of resolver invocations."),
)

op := func(name string) *observation.Operation {
return observationContext.Operation(observation.Op{
Name: fmt.Sprintf("codeintel.resolvers.%s", name),
MetricLabelValues: []string{name},
Metrics: metrics,
ErrorFilter: func(err error) observation.ErrorFilterBehaviour {
return observation.EmitForDefault
},
})
}

// suboperations do not have their own metrics but do have their
// own opentracing spans. This allows us to more granularly track
// the latency for parts of a request without noising up Prometheus.
subOp := func(name string) *observation.Operation {
return observationContext.Operation(observation.Op{
Name: fmt.Sprintf("codeintel.resolvers.%s", name),
})
}

return &operations{
queryResolver: op("QueryResolver"),
findClosestDumps: subOp("findClosestDumps"),
}
}

func observeResolver(
ctx context.Context,
err *error,
operation *observation.Operation,
threshold time.Duration,
observationArgs observation.Args,
) (context.Context, observation.TraceLogger, func()) {
start := time.Now()
ctx, trace, endObservation := operation.With(ctx, err, observationArgs)

return ctx, trace, func() {
duration := time.Since(start)
endObservation(1, observation.Args{})

if duration >= threshold {
// use trace logger which includes all relevant fields
lowSlowRequest(trace, duration, err)
}
}
}

func lowSlowRequest(logger log.Logger, duration time.Duration, err *error) {
fields := []log.Field{log.Duration("duration", duration)}
if err != nil && *err != nil {
fields = append(fields, log.Error(*err))
}
logger.Warn("Slow codeintel request", fields...)
}
@ -1,190 +0,0 @@
package resolvers

import (
"context"

"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav"
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/lsifstore"
)

// QueryResolver is the main interface to bundle-related operations exposed to the GraphQL API. This
// resolver consolidates the logic for bundle operations and is not itself concerned with GraphQL/API
// specifics (auth, validation, marshaling, etc.). This resolver is wrapped by a symmetric resolver
// in this package's graphql subpackage, which is exposed directly by the API.
type QueryResolver interface {
LSIFUploads(ctx context.Context) ([]store.Upload, error)
Ranges(ctx context.Context, startLine, endLine int) ([]AdjustedCodeIntelligenceRange, error)
Stencil(ctx context.Context) ([]lsifstore.Range, error)
Diagnostics(ctx context.Context, limit int) ([]AdjustedDiagnostic, int, error)
Hover(ctx context.Context, line, character int) (string, lsifstore.Range, bool, error)
Definitions(ctx context.Context, line, character int) ([]AdjustedLocation, error)
References(ctx context.Context, line, character, limit int, rawCursor string) ([]AdjustedLocation, string, error)
Implementations(ctx context.Context, line, character, limit int, rawCursor string) ([]AdjustedLocation, string, error)
}

type queryResolver struct {
repositoryID int
commit string
path string

operations *operations

codenavResolver CodeNavResolver
requestState codenav.RequestState
}

// NewQueryResolver creates a new query resolver with the given services. The methods of this
// struct return queries for the given repository, commit, and path, and will query only the
// bundles associated with the given dump objects.
func NewQueryResolver(repositoryID int, commit string, path string, operations *operations, codenavResolver CodeNavResolver, requestState codenav.RequestState) QueryResolver {
return &queryResolver{
operations: operations,
repositoryID: repositoryID,
commit: commit,
path: path,
codenavResolver: codenavResolver,
requestState: requestState,
}
}

// LSIFUploads returns the list of dbstore.Uploads for the store.Dumps determined to be applicable
// for answering code-intel queries.
func (r *queryResolver) LSIFUploads(ctx context.Context) ([]dbstore.Upload, error) {
uploads, err := r.codenavResolver.LSIFUploads(ctx, r.requestState)
if err != nil {
return []dbstore.Upload{}, err
}

dbUploads := []dbstore.Upload{}
for _, u := range uploads {
dbUploads = append(dbUploads, sharedDumpToDbstoreUpload(u))
}

return dbUploads, err
}

// Ranges returns code intelligence for the ranges that fall within the given range of lines. These
// results are partial and do not include references outside the current file, or any location that
// requires cross-linking of bundles (cross-repo or cross-root).
func (r *queryResolver) Ranges(ctx context.Context, startLine, endLine int) (adjustedRanges []AdjustedCodeIntelligenceRange, err error) {
args := shared.RequestArgs{
RepositoryID: r.repositoryID,
Commit: r.commit,
Path: r.path,
}
rngs, err := r.codenavResolver.Ranges(ctx, args, r.requestState, startLine, endLine)
if err != nil {
return nil, err
}

adjustedRanges = sharedRangeToAdjustedRange(rngs)

return adjustedRanges, nil
}

// Stencil returns all ranges within a single document.
func (r *queryResolver) Stencil(ctx context.Context) (adjustedRanges []lsifstore.Range, err error) {
args := shared.RequestArgs{
RepositoryID: r.repositoryID,
Commit: r.commit,
Path: r.path,
}
ranges, err := r.codenavResolver.Stencil(ctx, args, r.requestState)
for _, r := range ranges {
adjustedRanges = append(adjustedRanges, sharedRangeTolsifstoreRange(r))
}
return adjustedRanges, err
}

// Diagnostics returns the diagnostics for documents with the given path prefix.
func (r *queryResolver) Diagnostics(ctx context.Context, limit int) (adjustedDiagnostics []AdjustedDiagnostic, _ int, err error) {
args := shared.RequestArgs{
RepositoryID: r.repositoryID,
Commit: r.commit,
Path: r.path,
Limit: limit,
}
diag, cursor, err := r.codenavResolver.Diagnostics(ctx, args, r.requestState)
if err != nil {
return nil, 0, err
}

adjustedDiag := sharedDiagnosticAtUploadToAdjustedDiagnostic(diag)

return adjustedDiag, cursor, nil
}

// Hover returns the hover text and range for the symbol at the given position.
func (r *queryResolver) Hover(ctx context.Context, line, character int) (_ string, _ lsifstore.Range, _ bool, err error) {
args := shared.RequestArgs{
RepositoryID: r.repositoryID,
Commit: r.commit,
Path: r.path,
Line: line,
Character: character,
}
text, rnge, ok, err := r.codenavResolver.Hover(ctx, args, r.requestState)
return text, sharedRangeTolsifstoreRange(rnge), ok, err
}

// Definitions returns the list of source locations that define the symbol at the given position.
func (r *queryResolver) Definitions(ctx context.Context, line, character int) (_ []AdjustedLocation, err error) {
args := shared.RequestArgs{
RepositoryID: r.repositoryID,
Commit: r.commit,
Path: r.path,
Line: line,
Character: character,
}
defs, err := r.codenavResolver.Definitions(ctx, args, r.requestState)
if err != nil {
return nil, err
}

return uploadLocationToAdjustedLocations(defs), nil
}

// References returns the list of source locations that reference the symbol at the given position.
func (r *queryResolver) References(ctx context.Context, line, character, limit int, rawCursor string) (_ []AdjustedLocation, _ string, err error) {
args := shared.RequestArgs{
RepositoryID: r.repositoryID,
Commit: r.commit,
Path: r.path,
Line: line,
Character: character,
Limit: limit,
RawCursor: rawCursor,
}
refs, cursor, err := r.codenavResolver.References(ctx, args, r.requestState)
if err != nil {
return nil, "", err
}

adjstedLoc := uploadLocationToAdjustedLocations(refs)

return adjstedLoc, cursor, nil
}

// Implementations returns the list of source locations that define the symbol at the given position.
func (r *queryResolver) Implementations(ctx context.Context, line, character int, limit int, rawCursor string) (_ []AdjustedLocation, _ string, err error) {
args := shared.RequestArgs{
RepositoryID: r.repositoryID,
Commit: r.commit,
Path: r.path,
Line: line,
Character: character,
Limit: limit,
RawCursor: rawCursor,
}
impl, cursor, err := r.codenavResolver.Implementations(ctx, args, r.requestState)
if err != nil {
return nil, "", err
}

adjustedLoc := uploadLocationToAdjustedLocations(impl)

return adjustedLoc, cursor, nil
}
@ -1,222 +0,0 @@
package resolvers

import (
"context"
"strconv"
"strings"

"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
store "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/lsifstore"
"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
)

// filterUploadsWithCommits removes the uploads for commits which are unknown to gitserver from the given
// slice. The slice is filtered in-place and returned (to update the slice length).
func filterUploadsWithCommits(ctx context.Context, cachedCommitChecker *cachedCommitChecker, uploads []store.Dump) ([]store.Dump, error) {
rcs := make([]gitserver.RepositoryCommit, 0, len(uploads))
for _, upload := range uploads {
rcs = append(rcs, gitserver.RepositoryCommit{
RepositoryID: upload.RepositoryID,
Commit: upload.Commit,
})
}
exists, err := cachedCommitChecker.existsBatch(ctx, rcs)
if err != nil {
return nil, err
}

filtered := uploads[:0]
for i, upload := range uploads {
if exists[i] {
filtered = append(filtered, upload)
}
}

return filtered, nil
}

func uploadIDsToString(vs []store.Dump) string {
ids := make([]string, 0, len(vs))
for _, v := range vs {
ids = append(ids, strconv.Itoa(v.ID))
}

return strings.Join(ids, ", ")
}

func sharedRangeTolsifstoreRange(r shared.Range) lsifstore.Range {
return lsifstore.Range{
Start: lsifstore.Position(r.Start),
End: lsifstore.Position(r.End),
}
}

func sharedDiagnosticAtUploadToAdjustedDiagnostic(shared []shared.DiagnosticAtUpload) []AdjustedDiagnostic {
adjustedDiagnostics := make([]AdjustedDiagnostic, 0, len(shared))
for _, diag := range shared {
diagnosticData := precise.DiagnosticData{
Severity: diag.Severity,
Code: diag.Code,
Message: diag.Message,
Source: diag.Source,
StartLine: diag.StartLine,
StartCharacter: diag.StartCharacter,
EndLine: diag.EndLine,
EndCharacter: diag.EndCharacter,
}
lsifDiag := lsifstore.Diagnostic{
DiagnosticData: diagnosticData,
DumpID: diag.DumpID,
Path: diag.Path,
}

adjusted := AdjustedDiagnostic{
Diagnostic: lsifDiag,
Dump: store.Dump(diag.Dump),
AdjustedCommit: diag.AdjustedCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(diag.AdjustedRange.Start),
End: lsifstore.Position(diag.AdjustedRange.End),
},
}
adjustedDiagnostics = append(adjustedDiagnostics, adjusted)
}
return adjustedDiagnostics
}

func sharedDumpToDbstoreUpload(dump shared.Dump) dbstore.Upload {
return dbstore.Upload{
ID: dump.ID,
Commit: dump.Commit,
Root: dump.Root,
VisibleAtTip: dump.VisibleAtTip,
UploadedAt: dump.UploadedAt,
State: dump.State,
FailureMessage: dump.FailureMessage,
StartedAt: dump.StartedAt,
FinishedAt: dump.FinishedAt,
ProcessAfter: dump.ProcessAfter,
NumResets: dump.NumResets,
NumFailures: dump.NumFailures,
RepositoryID: dump.RepositoryID,
RepositoryName: dump.RepositoryName,
Indexer: dump.Indexer,
IndexerVersion: dump.IndexerVersion,
NumParts: 0,
UploadedParts: []int{},
UploadSize: nil,
Rank: nil,
AssociatedIndexID: dump.AssociatedIndexID,
}
}

func sharedRangeToAdjustedRange(rng []shared.AdjustedCodeIntelligenceRange) []AdjustedCodeIntelligenceRange {
adjustedRange := make([]AdjustedCodeIntelligenceRange, 0, len(rng))
for _, r := range rng {

definitions := make([]AdjustedLocation, 0, len(r.Definitions))
for _, d := range r.Definitions {
def := AdjustedLocation{
Dump: store.Dump(d.Dump),
Path: d.Path,
AdjustedCommit: d.TargetCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(d.TargetRange.Start),
End: lsifstore.Position(d.TargetRange.End),
},
}
definitions = append(definitions, def)
}

references := make([]AdjustedLocation, 0, len(r.References))
for _, d := range r.References {
ref := AdjustedLocation{
Dump: store.Dump(d.Dump),
Path: d.Path,
AdjustedCommit: d.TargetCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(d.TargetRange.Start),
End: lsifstore.Position(d.TargetRange.End),
},
}
references = append(references, ref)
}

implementations := make([]AdjustedLocation, 0, len(r.Implementations))
for _, d := range r.Implementations {
impl := AdjustedLocation{
Dump: store.Dump(d.Dump),
Path: d.Path,
AdjustedCommit: d.TargetCommit,
AdjustedRange: lsifstore.Range{
Start: lsifstore.Position(d.TargetRange.Start),
End: lsifstore.Position(d.TargetRange.End),
},
}
implementations = append(implementations, impl)
}

adj := AdjustedCodeIntelligenceRange{
Range: lsifstore.Range{
Start: lsifstore.Position(r.Range.Start),
End: lsifstore.Position(r.Range.End),
},
Definitions: definitions,
References: references,
Implementations: implementations,
HoverText: r.HoverText,
}

adjustedRange = append(adjustedRange, adj)
}

return adjustedRange
}

func uploadLocationToAdjustedLocations(location []shared.UploadLocation) []AdjustedLocation {
uploadLocation := make([]AdjustedLocation, 0, len(location))
for _, loc := range location {
dump := store.Dump{
ID: loc.Dump.ID,
Commit: loc.Dump.Commit,
Root: loc.Dump.Root,
VisibleAtTip: loc.Dump.VisibleAtTip,
UploadedAt: loc.Dump.UploadedAt,
State: loc.Dump.State,
FailureMessage: loc.Dump.FailureMessage,
StartedAt: loc.Dump.StartedAt,
FinishedAt: loc.Dump.FinishedAt,
ProcessAfter: loc.Dump.ProcessAfter,
NumResets: loc.Dump.NumResets,
NumFailures: loc.Dump.NumFailures,
RepositoryID: loc.Dump.RepositoryID,
RepositoryName: loc.Dump.RepositoryName,
Indexer: loc.Dump.Indexer,
IndexerVersion: loc.Dump.IndexerVersion,
AssociatedIndexID: loc.Dump.AssociatedIndexID,
}

adjustedRange := lsifstore.Range{
Start: lsifstore.Position{
Line: loc.TargetRange.Start.Line,
Character: loc.TargetRange.Start.Character,
},
End: lsifstore.Position{
Line: loc.TargetRange.End.Line,
Character: loc.TargetRange.End.Character,
},
}

uploadLocation = append(uploadLocation, AdjustedLocation{
Dump: dump,
Path: loc.Path,
AdjustedCommit: loc.TargetCommit,
AdjustedRange: adjustedRange,
})
}

return uploadLocation
}
@ -4,18 +4,11 @@ import (
"context"
"time"

"github.com/opentracing/opentracing-go/log"

gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
"github.com/sourcegraph/sourcegraph/internal/api"
"github.com/sourcegraph/sourcegraph/internal/authz"
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav"
policies "github.com/sourcegraph/sourcegraph/internal/codeintel/policies/enterprise"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
"github.com/sourcegraph/sourcegraph/internal/conf"
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/gitserver"
"github.com/sourcegraph/sourcegraph/internal/observation"
executor "github.com/sourcegraph/sourcegraph/internal/services/executors/transport/graphql"
symbolsClient "github.com/sourcegraph/sourcegraph/internal/symbols"
"github.com/sourcegraph/sourcegraph/internal/timeutil"
@ -59,13 +52,13 @@ type Resolver interface {

UploadConnectionResolver(opts dbstore.GetUploadsOptions) *UploadsResolver
IndexConnectionResolver(opts dbstore.GetIndexesOptions) *IndexesResolver
QueryResolver(ctx context.Context, args *gql.GitBlobLSIFDataArgs) (QueryResolver, error)
RepositorySummary(ctx context.Context, repositoryID int) (RepositorySummary, error)

RequestLanguageSupport(ctx context.Context, userID int, language string) error
RequestedLanguageSupport(ctx context.Context, userID int) ([]string, error)

ExecutorResolver() executor.Resolver
CodeNavResolver() CodeNavResolver
}

type RepositorySummary struct {
@ -76,19 +69,14 @@ type RepositorySummary struct {
}

type resolver struct {
db database.DB
dbStore DBStore
lsifStore LSIFStore
gitserverClient GitserverClient
policyMatcher *policies.Matcher
indexEnqueuer IndexEnqueuer
operations *operations
executorResolver executor.Resolver
symbolsClient *symbolsClient.Client

// See the same field on the QueryResolver struct
maximumIndexesPerMonikerSearch int

codenavResolver CodeNavResolver
}

@ -100,12 +88,10 @@ func NewResolver(
policyMatcher *policies.Matcher,
indexEnqueuer IndexEnqueuer,
symbolsClient *symbolsClient.Client,
maximumIndexesPerMonikerSearch int,
observationContext *observation.Context,
dbConn database.DB,
codenavResolver CodeNavResolver,
executorResolver executor.Resolver,
) Resolver {
return newResolver(dbStore, lsifStore, gitserverClient, policyMatcher, indexEnqueuer, symbolsClient, maximumIndexesPerMonikerSearch, observationContext, dbConn, codenavResolver)
return newResolver(dbStore, lsifStore, gitserverClient, policyMatcher, indexEnqueuer, symbolsClient, codenavResolver, executorResolver)
}

func newResolver(
@ -115,23 +101,18 @@ func newResolver(
policyMatcher *policies.Matcher,
indexEnqueuer IndexEnqueuer,
symbolsClient *symbolsClient.Client,
maximumIndexesPerMonikerSearch int,
observationContext *observation.Context,
dbConn database.DB,
codenavResolver CodeNavResolver,
executorResolver executor.Resolver,
) *resolver {
return &resolver{
db: dbConn,
dbStore: dbStore,
lsifStore: lsifStore,
gitserverClient: gitserverClient,
policyMatcher: policyMatcher,
indexEnqueuer: indexEnqueuer,
symbolsClient: symbolsClient,
maximumIndexesPerMonikerSearch: maximumIndexesPerMonikerSearch,
operations: newOperations(observationContext),
executorResolver: executor.New(dbConn),
codenavResolver: codenavResolver,
dbStore: dbStore,
lsifStore: lsifStore,
gitserverClient: gitserverClient,
policyMatcher: policyMatcher,
indexEnqueuer: indexEnqueuer,
symbolsClient: symbolsClient,
executorResolver: executorResolver,
codenavResolver: codenavResolver,
}
}

@ -194,49 +175,6 @@ func (r *resolver) QueueAutoIndexJobsForRepo(ctx context.Context, repositoryID i
return r.indexEnqueuer.QueueIndexes(ctx, repositoryID, rev, configuration, true, true)
}

const slowQueryResolverRequestThreshold = time.Second

// QueryResolver determines the set of dumps that can answer code intel queries for the
// given repository, commit, and path, then constructs a new query resolver instance which
// can be used to answer subsequent queries.
func (r *resolver) QueryResolver(ctx context.Context, args *gql.GitBlobLSIFDataArgs) (_ QueryResolver, err error) {
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.queryResolver, slowQueryResolverRequestThreshold, observation.Args{
LogFields: []log.Field{
log.Int("repositoryID", int(args.Repo.ID)),
log.String("commit", string(args.Commit)),
log.String("path", args.Path),
log.Bool("exactPath", args.ExactPath),
log.String("indexer", args.ToolName),
},
})
defer endObservation()

repoId := int(args.Repo.ID)
commit := string(args.Commit)
cachedCommitChecker := newCachedCommitChecker(r.gitserverClient)
cachedCommitChecker.set(repoId, commit)

// Maintain a map from identifiers to hydrated upload records from the database. We use
// this map as a quick lookup when constructing the resulting location set. Any additional
// upload records pulled back from the database while processing this page will be added
// to this map.
dumps, err := r.findClosestDumps(ctx, cachedCommitChecker, repoId, commit, args.Path, args.ExactPath, args.ToolName)
if err != nil || len(dumps) == 0 {
return nil, err
}

reqState := codenav.NewRequestState(
dumps,
authz.DefaultSubRepoPermsChecker,
gitserver.NewClient(r.db), args.Repo, commit, args.Path,
r.gitserverClient,
r.maximumIndexesPerMonikerSearch,
r.codenavResolver.GetHunkCacheSize(),
)

return NewQueryResolver(repoId, commit, args.Path, r.operations, r.codenavResolver, *reqState), nil
}

func (r *resolver) GetConfigurationPolicies(ctx context.Context, opts dbstore.GetConfigurationPoliciesOptions) ([]dbstore.ConfigurationPolicy, int, error) {
return r.dbStore.GetConfigurationPolicies(ctx, opts)
}

@ -1,35 +0,0 @@
package resolvers

import (
"context"
"testing"

gql "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
"github.com/sourcegraph/sourcegraph/internal/api"
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/observation"
"github.com/sourcegraph/sourcegraph/internal/types"
)

func TestQueryResolver(t *testing.T) {
mockDBStore := NewMockDBStore() // returns no dumps
mockLSIFStore := NewMockLSIFStore()
mockGitserverClient := NewMockGitserverClient()
mockCodeNavResolver := NewMockCodeNavResolver()

resolver := NewResolver(mockDBStore, mockLSIFStore, mockGitserverClient, nil, nil, nil, 50, &observation.TestContext, database.NewMockDB(), mockCodeNavResolver)
queryResolver, err := resolver.QueryResolver(context.Background(), &gql.GitBlobLSIFDataArgs{
Repo: &types.Repo{ID: 50},
Commit: api.CommitID("deadbeef"),
Path: "/foo/bar.go",
ExactPath: true,
ToolName: "lsif-go",
})
if err != nil {
t.Fatalf("unexpected error: %s", err)
}

if queryResolver != nil {
t.Errorf("expected nil-valued resolver")
}
}
@ -9,9 +9,7 @@ import (
"github.com/google/go-cmp/cmp"

"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/gitserver/gitdomain"
"github.com/sourcegraph/sourcegraph/internal/observation"
)

func TestRetentionPolicyOverview(t *testing.T) {
@ -20,7 +18,7 @@ func TestRetentionPolicyOverview(t *testing.T) {
mockGitserverClient := NewMockGitserverClient()
mockCodeNavResolver := NewMockCodeNavResolver()

resolver := NewResolver(mockDBStore, mockLSIFStore, mockGitserverClient, nil, nil, nil, 50, &observation.TestContext, database.NewMockDB(), mockCodeNavResolver)
resolver := NewResolver(mockDBStore, mockLSIFStore, mockGitserverClient, nil, nil, nil, mockCodeNavResolver, nil)

mockClock := glock.NewMockClock()

@ -220,7 +218,7 @@ func TestRetentionPolicyOverview_ByVisibility(t *testing.T) {
mockGitserverClient := NewMockGitserverClient()
mockCodeNavResolver := NewMockCodeNavResolver()

resolver := NewResolver(mockDBStore, mockLSIFStore, mockGitserverClient, nil, nil, nil, 50, &observation.TestContext, database.NewMockDB(), mockCodeNavResolver)
resolver := NewResolver(mockDBStore, mockLSIFStore, mockGitserverClient, nil, nil, nil, mockCodeNavResolver, nil)

mockClock := glock.NewMockClock()

@ -77,7 +77,7 @@ func NewServices(ctx context.Context, config *Config, siteConfig conftypes.Watch
// Initialize services
lsif := database.NewDBWith(observationContext.Logger, codeIntelDB)
uploadSvc := uploads.GetService(db, lsif, gitserverClient)
codenavSvc := codenav.GetService(db, lsif, uploadSvc)
codenavSvc := codenav.GetService(db, lsif, uploadSvc, gitserverClient)
indexEnqueuer := autoindexing.GetService(db, &autoindexing.DBStoreShim{Store: dbStore}, gitserverClient, repoUpdaterClient)

// Initialize http endpoints

@ -95,7 +95,7 @@ func enterpriseSetupHook(db database.DB, conf conftypes.UnifiedWatchable) enterp
logger.Fatal(err.Error())
}

if err := codeintel.Init(ctx, db, codeIntelConfig, &enterpriseServices, observationContext, services); err != nil {
if err := codeintel.Init(ctx, db, codeIntelConfig, &enterpriseServices, services); err != nil {
logger.Fatal("failed to initialize codeintel", log.Error(err))
}

@ -13,6 +13,8 @@ import (

type CommitCache interface {
AreCommitsResolvable(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error)
ExistsBatch(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error)
SetResolvableCommit(repositoryID int, commit string)
}

type commitCache struct {
@ -21,13 +23,60 @@ type commitCache struct {
cache map[int]map[string]bool
}

func newCommitCache(client shared.GitserverClient) CommitCache {
func NewCommitCache(client shared.GitserverClient) CommitCache {
return &commitCache{
gitserverClient: client,
cache: map[int]map[string]bool{},
}
}

// ExistsBatch determines if the given commits are resolvable for the given repositories.
// If we do not know the answer from a previous call to set or existsBatch, we ask gitserver
// to resolve the remaining commits and store the results for subsequent calls. This method
// returns a slice of the same size as the input slice, true indicating that the commit at
// the symmetric index exists.
func (c *commitCache) ExistsBatch(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error) {
exists := make([]bool, len(commits))
rcIndexMap := make([]int, 0, len(commits))
rcs := make([]gitserver.RepositoryCommit, 0, len(commits))

for i, rc := range commits {
if e, ok := c.getInternal(rc.RepositoryID, rc.Commit); ok {
exists[i] = e
} else {
rcIndexMap = append(rcIndexMap, i)
rcs = append(rcs, gitserver.RepositoryCommit{
RepositoryID: rc.RepositoryID,
Commit: rc.Commit,
})
}
}

if len(rcs) == 0 {
return exists, nil
}

// Perform heavy work outside of critical section
e, err := c.gitserverClient.CommitsExist(ctx, rcs)
if err != nil {
return nil, errors.Wrap(err, "gitserverClient.CommitsExist")
}
if len(e) != len(rcs) {
panic(strings.Join([]string{
fmt.Sprintf("Expected slice returned from CommitsExist to have len %d, but has len %d.", len(rcs), len(e)),
"If this panic occurred during a test, your test is missing a mock definition for CommitsExist.",
"If this occurred during runtime, please file a bug.",
}, " "))
}

for i, rc := range rcs {
exists[rcIndexMap[i]] = e[i]
c.setInternal(rc.RepositoryID, rc.Commit, e[i])
}

return exists, nil
}

// AreCommitsResolvable determines if the given commits are resolvable for the given repositories.
// If we do not know the answer from a previous call to set or AreCommitsResolvable, we ask gitserver
// to resolve the remaining commits and store the results for subsequent calls. This method
@ -76,6 +125,11 @@ func (c *commitCache) AreCommitsResolvable(ctx context.Context, commits []gitser
return exists, nil
}

// set marks the given repository and commit as valid and resolvable by gitserver.
func (c *commitCache) SetResolvableCommit(repositoryID int, commit string) {
c.setInternal(repositoryID, commit, true)
}

func (c *commitCache) getInternal(repositoryID int, commit string) (bool, bool) {
c.mutex.RLock()
defer c.mutex.RUnlock()

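The cache added above batches gitserver lookups and remembers every answer; SetResolvableCommit pre-seeds entries the caller already trusts. A self-contained sketch of that behavior, with simplified stand-in types rather than the real gitserver or codenav packages:

package main

import (
	"context"
	"fmt"
)

type repositoryCommit struct {
	RepositoryID int
	Commit       string
}

type commitCache struct {
	// resolve stands in for GitserverClient.CommitsExist.
	resolve func(ctx context.Context, rcs []repositoryCommit) ([]bool, error)
	cache   map[repositoryCommit]bool
}

func (c *commitCache) SetResolvableCommit(repositoryID int, commit string) {
	c.cache[repositoryCommit{repositoryID, commit}] = true
}

func (c *commitCache) ExistsBatch(ctx context.Context, rcs []repositoryCommit) ([]bool, error) {
	exists := make([]bool, len(rcs))
	var missing []repositoryCommit
	var missingIdx []int
	for i, rc := range rcs {
		if v, ok := c.cache[rc]; ok {
			exists[i] = v
			continue
		}
		missing = append(missing, rc)
		missingIdx = append(missingIdx, i)
	}
	if len(missing) == 0 {
		return exists, nil // fully answered from the cache
	}
	resolved, err := c.resolve(ctx, missing)
	if err != nil {
		return nil, err
	}
	for i, rc := range missing {
		exists[missingIdx[i]] = resolved[i]
		c.cache[rc] = resolved[i] // remember the answer for subsequent calls
	}
	return exists, nil
}

func main() {
	lookups := 0
	c := &commitCache{
		resolve: func(ctx context.Context, rcs []repositoryCommit) ([]bool, error) {
			lookups++
			out := make([]bool, len(rcs))
			for i := range out {
				out[i] = true
			}
			return out, nil
		},
		cache: map[repositoryCommit]bool{},
	}
	c.SetResolvableCommit(1, "deadbeef")
	_, _ = c.ExistsBatch(context.Background(), []repositoryCommit{{1, "deadbeef"}, {1, "cafebabe"}})
	_, _ = c.ExistsBatch(context.Background(), []repositoryCommit{{1, "cafebabe"}})
	fmt.Println("gitserver lookups:", lookups) // 1: repeated commits are served from the cache
}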
@ -10,7 +10,6 @@ import (

"github.com/sourcegraph/sourcegraph/internal/authz"
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
"github.com/sourcegraph/sourcegraph/internal/gitserver"
"github.com/sourcegraph/sourcegraph/internal/types"
)

@ -38,7 +37,7 @@ type GitTreeTranslator interface {
}

type gitTreeTranslator struct {
client gitserver.Client
client shared.GitserverClient
localRequestArgs *requestArgs
hunkCache HunkCache
}
@ -74,7 +73,7 @@ func NewHunkCache(size int) (HunkCache, error) {
}

// NewGitTreeTranslator creates a new GitTreeTranslator with the given repository and source commit.
func NewGitTreeTranslator(client gitserver.Client, args *requestArgs, hunkCache HunkCache) GitTreeTranslator {
func NewGitTreeTranslator(client shared.GitserverClient, args *requestArgs, hunkCache HunkCache) GitTreeTranslator {
return &gitTreeTranslator{
client: client,
hunkCache: hunkCache,

@ -11,12 +11,14 @@ import (
"github.com/sourcegraph/go-diff/diff"

"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/gitserver"
"github.com/sourcegraph/sourcegraph/internal/observation"
"github.com/sourcegraph/sourcegraph/internal/types"
)

var client = gitserver.NewClient(database.NewMockDB())
var client = codeintelgitserver.New(database.NewMockDB(), NewMockDBStore(), &observation.TestContext)

func TestGetTargetCommitPathFromSourcePath(t *testing.T) {
args := &requestArgs{

@ -3,6 +3,10 @@ package codenav
import (
"context"

"github.com/sourcegraph/go-diff/diff"

"github.com/sourcegraph/sourcegraph/internal/api"
"github.com/sourcegraph/sourcegraph/internal/authz"
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
uploads "github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/shared"
"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
@ -12,8 +16,15 @@ type UploadService interface {
GetDumpsWithDefinitionsForMonikers(ctx context.Context, monikers []precise.QualifiedMonikerData) (_ []uploads.Dump, err error)
GetUploadIDsWithReferences(ctx context.Context, orderedMonikers []precise.QualifiedMonikerData, ignoreIDs []int, repositoryID int, commit string, limit int, offset int) (ids []int, recordsScanned int, totalCount int, err error)
GetDumpsByIDs(ctx context.Context, ids []int) (_ []uploads.Dump, err error)
InferClosestUploads(ctx context.Context, repositoryID int, commit, path string, exactPath bool, indexer string) (_ []uploads.Dump, err error)
}

type GitserverClient interface {
CommitsExist(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error)
DiffPath(ctx context.Context, checker authz.SubRepoPermissionChecker, repo api.RepoName, sourceCommit, targetCommit, path string) ([]*diff.Hunk, error)
}

type DBStore interface {
RepoName(ctx context.Context, repositoryID int) (string, error)
RepoNames(ctx context.Context, repositoryIDs ...int) (map[int]string, error)
}

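Narrowing the codenav service's gitserver dependency to the two-method GitserverClient interface above also keeps it easy to stub in tests. A hedged sketch of a hand-written fake under that assumption (local stand-in types, not the real gitserver or go-diff packages):

package main

import (
	"context"
	"fmt"
)

type repositoryCommit struct {
	RepositoryID int
	Commit       string
}

// fakeGitserver satisfies a CommitsExist-style method by reporting every
// commit as present; a real test double could flip individual entries.
type fakeGitserver struct{}

func (fakeGitserver) CommitsExist(ctx context.Context, commits []repositoryCommit) ([]bool, error) {
	out := make([]bool, len(commits))
	for i := range out {
		out[i] = true
	}
	return out, nil
}

func main() {
	exists, err := fakeGitserver{}.CommitsExist(context.Background(), []repositoryCommit{{RepositoryID: 1, Commit: "deadbeef"}})
	fmt.Println(exists, err) // [true] <nil>
}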
@ -22,7 +22,7 @@ var (

// GetService creates or returns an already-initialized symbols service. If the service is
// new, it will use the given database handle.
func GetService(db, codeIntelDB database.DB, uploadSvc UploadService) *Service {
func GetService(db, codeIntelDB database.DB, uploadSvc UploadService, gitserver GitserverClient) *Service {
svcOnce.Do(func() {
oc := func(name string) *observation.Context {
return &observation.Context{
@ -34,7 +34,7 @@ func GetService(db, codeIntelDB database.DB, uploadSvc UploadService) *Service {

store := store.New(db, oc("store"))
lsifstore := lsifstore.New(codeIntelDB, oc("lsifstore"))
svc = newService(store, lsifstore, uploadSvc, oc("service"))
svc = newService(store, lsifstore, uploadSvc, gitserver, oc("service"))
})

return svc

@ -38,6 +38,8 @@ type LsifStore interface {

// Ranges
GetRanges(ctx context.Context, bundleID int, path string, startLine, endLine int) (_ []shared.CodeIntelligenceRange, err error)

GetPathExists(ctx context.Context, bundleID int, path string) (_ bool, err error)
}

type store struct {

@ -0,0 +1,28 @@
package lsifstore

import (
"context"

"github.com/keegancsmith/sqlf"
"github.com/opentracing/opentracing-go/log"

"github.com/sourcegraph/sourcegraph/internal/database/basestore"
"github.com/sourcegraph/sourcegraph/internal/observation"
)

// GetPathExists determines if the path exists in the database.
func (s *store) GetPathExists(ctx context.Context, bundleID int, path string) (_ bool, err error) {
ctx, _, endObservation := s.operations.getExists.With(ctx, &err, observation.Args{LogFields: []log.Field{
log.Int("bundleID", bundleID),
log.String("path", path),
}})
defer endObservation(1, observation.Args{})

_, exists, err := basestore.ScanFirstString(s.db.Query(ctx, sqlf.Sprintf(existsQuery, bundleID, path)))
return exists, err
}

const existsQuery = `
-- source: internal/codeintel/stores/lsifstore/exists.go:Exists
SELECT path FROM lsif_data_documents WHERE dump_id = %s AND path = %s LIMIT 1
`
@ -0,0 +1,27 @@
package lsifstore

import (
"context"
"testing"
)

func TestDatabaseExists(t *testing.T) {
store := populateTestStore(t)

testCases := []struct {
path string
expected bool
}{
{"cmd/lsif-go/main.go", true},
{"internal/index/indexer.go", true},
{"missing.go", false},
}

for _, testCase := range testCases {
if exists, err := store.GetPathExists(context.Background(), testBundleID, testCase.path); err != nil {
t.Fatalf("unexpected error %s", err)
} else if exists != testCase.expected {
t.Errorf("unexpected exists result for %s. want=%v have=%v", testCase.path, testCase.expected, exists)
}
}
}
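The new GetPathExists store method and its existsQuery above give callers a cheap probe for whether a bundle contains a document at all before running heavier queries. A hedged usage sketch against an interface of the same shape (the fake store and bundle ID below are placeholders, not the real lsifstore package):

package main

import (
	"context"
	"fmt"
)

type pathExistsStore interface {
	GetPathExists(ctx context.Context, bundleID int, path string) (bool, error)
}

// fakeStore pretends only one document is present in the bundle.
type fakeStore struct{}

func (fakeStore) GetPathExists(ctx context.Context, bundleID int, path string) (bool, error) {
	return path == "cmd/lsif-go/main.go", nil
}

// hasDocument shows how a caller might guard heavier queries behind the probe.
func hasDocument(ctx context.Context, s pathExistsStore, bundleID int, path string) (bool, error) {
	exists, err := s.GetPathExists(ctx, bundleID, path)
	if err != nil {
		return false, fmt.Errorf("GetPathExists: %w", err)
	}
	return exists, nil
}

func main() {
	ok, _ := hasDocument(context.Background(), fakeStore{}, 42, "missing.go")
	fmt.Println(ok) // false
}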
@ -15,6 +15,7 @@ type operations struct {
getDiagnostics *observation.Operation
getRanges *observation.Operation
getStencil *observation.Operation
getExists *observation.Operation
getMonikersByPosition *observation.Operation
getPackageInformation *observation.Operation
getBulkMonikerResults *observation.Operation
@ -56,6 +57,7 @@ func newOperations(observationContext *observation.Context) *operations {
getDiagnostics: op("GetDiagnostics"),
getRanges: op("GetRanges"),
getStencil: op("GetStencil"),
getExists: op("GetExists"),
getMonikersByPosition: op("GetMonikersByPosition"),
getPackageInformation: op("GetPackageInformation"),
getBulkMonikerResults: op("GetBulkMonikerResults"),

File diff suppressed because it is too large
@ -1,166 +0,0 @@
|
||||
// Code generated by go-mockgen 1.3.3; DO NOT EDIT.
|
||||
//
|
||||
// This file was generated by running `sg generate` (or `go-mockgen`) at the root of
|
||||
// this repository. To add additional mocks to this or another package, add a new entry
|
||||
// to the mockgen.yaml file in the root of this repository.
|
||||
|
||||
package codenav
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
|
||||
store "github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/internal/store"
|
||||
shared "github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
|
||||
)
|
||||
|
||||
// MockStore is a mock implementation of the Store interface (from the
|
||||
// package
|
||||
// github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/internal/store)
|
||||
// used for unit testing.
|
||||
type MockStore struct {
|
||||
// ListFunc is an instance of a mock function object controlling the
|
||||
// behavior of the method List.
|
||||
ListFunc *StoreListFunc
|
||||
}
|
||||
|
||||
// NewMockStore creates a new mock of the Store interface. All methods
|
||||
// return zero values for all results, unless overwritten.
|
||||
func NewMockStore() *MockStore {
|
||||
return &MockStore{
|
||||
ListFunc: &StoreListFunc{
|
||||
defaultHook: func(context.Context, store.ListOpts) (r0 []shared.Symbol, r1 error) {
|
||||
return
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// NewStrictMockStore creates a new mock of the Store interface. All methods
|
||||
// panic on invocation, unless overwritten.
|
||||
func NewStrictMockStore() *MockStore {
|
||||
return &MockStore{
|
||||
		ListFunc: &StoreListFunc{
			defaultHook: func(context.Context, store.ListOpts) ([]shared.Symbol, error) {
				panic("unexpected invocation of MockStore.List")
			},
		},
	}
}

// NewMockStoreFrom creates a new mock of the MockStore interface. All
// methods delegate to the given implementation, unless overwritten.
func NewMockStoreFrom(i store.Store) *MockStore {
	return &MockStore{
		ListFunc: &StoreListFunc{
			defaultHook: i.List,
		},
	}
}

// StoreListFunc describes the behavior when the List method of the parent
// MockStore instance is invoked.
type StoreListFunc struct {
	defaultHook func(context.Context, store.ListOpts) ([]shared.Symbol, error)
	hooks []func(context.Context, store.ListOpts) ([]shared.Symbol, error)
	history []StoreListFuncCall
	mutex sync.Mutex
}

// List delegates to the next hook function in the queue and stores the
// parameter and result values of this invocation.
func (m *MockStore) List(v0 context.Context, v1 store.ListOpts) ([]shared.Symbol, error) {
	r0, r1 := m.ListFunc.nextHook()(v0, v1)
	m.ListFunc.appendCall(StoreListFuncCall{v0, v1, r0, r1})
	return r0, r1
}

// SetDefaultHook sets function that is called when the List method of the
// parent MockStore instance is invoked and the hook queue is empty.
func (f *StoreListFunc) SetDefaultHook(hook func(context.Context, store.ListOpts) ([]shared.Symbol, error)) {
	f.defaultHook = hook
}

// PushHook adds a function to the end of hook queue. Each invocation of the
// List method of the parent MockStore instance invokes the hook at the
// front of the queue and discards it. After the queue is empty, the default
// hook function is invoked for any future action.
func (f *StoreListFunc) PushHook(hook func(context.Context, store.ListOpts) ([]shared.Symbol, error)) {
	f.mutex.Lock()
	f.hooks = append(f.hooks, hook)
	f.mutex.Unlock()
}

// SetDefaultReturn calls SetDefaultHook with a function that returns the
// given values.
func (f *StoreListFunc) SetDefaultReturn(r0 []shared.Symbol, r1 error) {
	f.SetDefaultHook(func(context.Context, store.ListOpts) ([]shared.Symbol, error) {
		return r0, r1
	})
}

// PushReturn calls PushHook with a function that returns the given values.
func (f *StoreListFunc) PushReturn(r0 []shared.Symbol, r1 error) {
	f.PushHook(func(context.Context, store.ListOpts) ([]shared.Symbol, error) {
		return r0, r1
	})
}

func (f *StoreListFunc) nextHook() func(context.Context, store.ListOpts) ([]shared.Symbol, error) {
	f.mutex.Lock()
	defer f.mutex.Unlock()

	if len(f.hooks) == 0 {
		return f.defaultHook
	}

	hook := f.hooks[0]
	f.hooks = f.hooks[1:]
	return hook
}

func (f *StoreListFunc) appendCall(r0 StoreListFuncCall) {
	f.mutex.Lock()
	f.history = append(f.history, r0)
	f.mutex.Unlock()
}

// History returns a sequence of StoreListFuncCall objects describing the
// invocations of this function.
func (f *StoreListFunc) History() []StoreListFuncCall {
	f.mutex.Lock()
	history := make([]StoreListFuncCall, len(f.history))
	copy(history, f.history)
	f.mutex.Unlock()

	return history
}

// StoreListFuncCall is an object that describes an invocation of method
// List on an instance of MockStore.
type StoreListFuncCall struct {
	// Arg0 is the value of the 1st argument passed to this method
	// invocation.
	Arg0 context.Context
	// Arg1 is the value of the 2nd argument passed to this method
	// invocation.
	Arg1 store.ListOpts
	// Result0 is the value of the 1st result returned from this method
	// invocation.
	Result0 []shared.Symbol
	// Result1 is the value of the 2nd result returned from this method
	// invocation.
	Result1 error
}

// Args returns an interface slice containing the arguments of this
// invocation.
func (c StoreListFuncCall) Args() []interface{} {
	return []interface{}{c.Arg0, c.Arg1}
}

// Results returns an interface slice containing the results of this
// invocation.
func (c StoreListFuncCall) Results() []interface{} {
	return []interface{}{c.Result0, c.Result1}
}
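For orientation, a minimal sketch (not part of this commit) of how such a generated mock is typically driven in a test: queue one-off results with PushReturn, set a fallback with SetDefaultReturn, then assert on History. All concrete values are placeholders.

// Illustrative sketch only (not part of this commit).
func TestMockStoreListSketch(t *testing.T) {
	mockStore := NewMockStore()
	mockStore.ListFunc.PushReturn([]shared.Symbol{{}}, nil) // first call: one (zero-valued) symbol
	mockStore.ListFunc.SetDefaultReturn(nil, nil)           // later calls: empty result

	symbols, err := mockStore.List(context.Background(), store.ListOpts{})
	if err != nil || len(symbols) != 1 {
		t.Fatalf("unexpected result: %v, %v", symbols, err)
	}
	if calls := mockStore.ListFunc.History(); len(calls) != 1 {
		t.Fatalf("expected one recorded call, got %d", len(calls))
	}
}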
internal/codeintel/codenav/mocks_test.go (new file, 3307 lines): diff suppressed because it is too large.
@@ -28,6 +28,7 @@ type operations struct {
	getUploadsWithDefinitionsForMonikers *observation.Operation
	getUploadIDsWithReferences *observation.Operation
	getDumpsByIDs *observation.Operation
	getClosestDumpsForBlob *observation.Operation
}

func newOperations(observationContext *observation.Context) *operations {
@@ -63,6 +64,7 @@ func newOperations(observationContext *observation.Context) *operations {
		getUploadsWithDefinitionsForMonikers: op("GetUploadsWithDefinitionsForMonikers"),
		getUploadIDsWithReferences: op("GetUploadIDsWithReferences"),
		getDumpsByIDs: op("GetDumpsByIDs"),
		getClosestDumpsForBlob: op("GetClosestDumpsForBlob"),
	}
}
@@ -5,8 +5,6 @@ import (

	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/types"
)

@@ -27,28 +25,27 @@ type RequestState struct {
}

func NewRequestState(
	uploads []dbstore.Dump,
	uploads []shared.Dump,
	authChecker authz.SubRepoPermissionChecker,
	client gitserver.Client, repo *types.Repo, commit, path string,
	gitclient shared.GitserverClient,
	gitclient shared.GitserverClient, repo *types.Repo, commit, path string,
	maxIndexes int,
	hunkCacheSize int,
) *RequestState {
) RequestState {
	r := &RequestState{}
	r.SetUploadsDataLoader(uploads)
	r.SetAuthChecker(authChecker)
	r.SetLocalGitTreeTranslator(client, repo, commit, path, hunkCacheSize)
	r.SetLocalGitTreeTranslator(gitclient, repo, commit, path, hunkCacheSize)
	r.SetLocalCommitCache(gitclient)
	r.SetMaximumIndexesPerMonikerSearch(maxIndexes)

	return r
	return *r
}
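A minimal sketch (not part of this commit) of calling the new constructor, assuming the parameter order shown in the hunk above; every concrete value below is a placeholder.

// Illustrative sketch only (not part of this commit).
func buildRequestStateSketch(authChecker authz.SubRepoPermissionChecker, gitclient shared.GitserverClient) RequestState {
	uploads := []shared.Dump{{ID: 50, Commit: "deadbeef", Root: "sub1/"}}
	return NewRequestState(
		uploads,
		authChecker,
		gitclient,
		&types.Repo{ID: 42},
		"deadbeef",     // commit
		"sub1/main.go", // path (hypothetical)
		500,            // maxIndexes
		50,             // hunkCacheSize
	)
}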

func (r *RequestState) GetCacheUploads() []shared.Dump {
func (r RequestState) GetCacheUploads() []shared.Dump {
	return r.dataLoader.uploads
}

func (r *RequestState) GetCacheUploadsAtIndex(index int) shared.Dump {
func (r RequestState) GetCacheUploadsAtIndex(index int) shared.Dump {
	if index < 0 || index >= len(r.dataLoader.uploads) {
		return shared.Dump{}
	}
@@ -60,14 +57,14 @@ func (r *RequestState) SetAuthChecker(authChecker authz.SubRepoPermissionChecker
	r.authChecker = authChecker
}

func (r *RequestState) SetUploadsDataLoader(uploads []dbstore.Dump) {
func (r *RequestState) SetUploadsDataLoader(uploads []shared.Dump) {
	r.dataLoader = NewUploadsDataLoader()
	for _, upload := range uploads {
		r.dataLoader.AddUpload(upload)
	}
}

func (r *RequestState) SetLocalGitTreeTranslator(client gitserver.Client, repo *types.Repo, commit, path string, hunkCacheSize int) error {
func (r *RequestState) SetLocalGitTreeTranslator(client shared.GitserverClient, repo *types.Repo, commit, path string, hunkCacheSize int) error {
	hunkCache, err := NewHunkCache(hunkCacheSize)
	if err != nil {
		return err
@@ -85,7 +82,7 @@ func (r *RequestState) SetLocalGitTreeTranslator(client gitserver.Client, repo *
}

func (r *RequestState) SetLocalCommitCache(client shared.GitserverClient) {
	r.commitCache = newCommitCache(client)
	r.commitCache = NewCommitCache(client)
}

func (r *RequestState) SetMaximumIndexesPerMonikerSearch(maxNumber int) {
@@ -121,29 +118,10 @@ func (l *UploadsDataLoader) SetUploadInCacheMap(uploads []shared.Dump) {
	}
}

func (l *UploadsDataLoader) AddUpload(d dbstore.Dump) {
func (l *UploadsDataLoader) AddUpload(dump shared.Dump) {
	l.cacheMutex.Lock()
	defer l.cacheMutex.Unlock()

	dump := shared.Dump{
		ID: d.ID,
		Commit: d.Commit,
		Root: d.Root,
		VisibleAtTip: d.VisibleAtTip,
		UploadedAt: d.UploadedAt,
		State: d.State,
		FailureMessage: d.FailureMessage,
		StartedAt: d.StartedAt,
		FinishedAt: d.FinishedAt,
		ProcessAfter: d.ProcessAfter,
		NumResets: d.NumResets,
		NumFailures: d.NumFailures,
		RepositoryID: d.RepositoryID,
		RepositoryName: d.RepositoryName,
		Indexer: d.Indexer,
		IndexerVersion: d.IndexerVersion,
		AssociatedIndexID: d.AssociatedIndexID,
	}
	l.uploads = append(l.uploads, dump)
	l.uploadsByID[dump.ID] = dump
}
@@ -33,6 +33,7 @@ type service interface {
	GetMonikersByPosition(ctx context.Context, bundleID int, path string, line, character int) (_ [][]precise.MonikerData, err error)
	GetBulkMonikerLocations(ctx context.Context, tableName string, uploadIDs []int, monikers []precise.MonikerData, limit, offset int) (_ []shared.Location, _ int, err error)
	GetPackageInformation(ctx context.Context, bundleID int, path, packageInformationID string) (_ precise.PackageInformationData, _ bool, err error)
	GetClosestDumpsForBlob(ctx context.Context, repositoryID int, commit, path string, exactPath bool, indexer string) (_ []shared.Dump, err error)

	// Uploads Service
	GetDumpsByIDs(ctx context.Context, ids []int) (_ []shared.Dump, err error)
@@ -43,14 +44,16 @@ type service interface {
type Service struct {
	store store.Store
	lsifstore lsifstore.LsifStore
	gitserver GitserverClient
	uploadSvc UploadService
	operations *operations
}

func newService(store store.Store, lsifstore lsifstore.LsifStore, uploadSvc UploadService, observationContext *observation.Context) *Service {
func newService(store store.Store, lsifstore lsifstore.LsifStore, uploadSvc UploadService, gitserver GitserverClient, observationContext *observation.Context) *Service {
	return &Service{
		store: store,
		lsifstore: lsifstore,
		gitserver: gitserver,
		uploadSvc: uploadSvc,
		operations: newOperations(observationContext),
	}
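A minimal wiring sketch (not part of this commit): the gitserver client now sits among the constructor dependencies of the codenav service. The names below are placeholders and the observation context is assumed.

// Illustrative sketch only (not part of this commit).
func newCodeNavServiceSketch(s store.Store, ls lsifstore.LsifStore, us UploadService, gs GitserverClient) *Service {
	return newService(s, ls, us, gs, &observation.TestContext)
}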

@@ -1192,6 +1195,93 @@ func (s *Service) GetPackageInformation(ctx context.Context, bundleID int, path,
	return s.lsifstore.GetPackageInformation(ctx, bundleID, path, packageInformationID)
}

func (s *Service) GetClosestDumpsForBlob(ctx context.Context, repositoryID int, commit, path string, exactPath bool, indexer string) (_ []shared.Dump, err error) {
	ctx, trace, endObservation := s.operations.getClosestDumpsForBlob.With(ctx, &err, observation.Args{
		LogFields: []traceLog.Field{
			traceLog.Int("repositoryID", repositoryID),
			traceLog.String("commit", commit),
			traceLog.String("path", path),
			traceLog.Bool("exactPath", exactPath),
			traceLog.String("indexer", indexer),
		},
	})
	defer endObservation(1, observation.Args{})

	candidates, err := s.uploadSvc.InferClosestUploads(ctx, repositoryID, commit, path, exactPath, indexer)
	if err != nil {
		return nil, err
	}

	uploadCandidates := updateSvcDumpToSharedDump(candidates)
	trace.Log(
		traceLog.Int("numCandidates", len(candidates)),
		traceLog.String("candidates", uploadIDsToString(uploadCandidates)),
	)

	commitChecker := NewCommitCache(s.gitserver)
	commitChecker.SetResolvableCommit(repositoryID, commit)

	candidatesWithCommits, err := filterUploadsWithCommits(ctx, commitChecker, uploadCandidates)
	if err != nil {
		return nil, err
	}
	trace.Log(
		traceLog.Int("numCandidatesWithCommits", len(candidatesWithCommits)),
		traceLog.String("candidatesWithCommits", uploadIDsToString(candidatesWithCommits)),
	)

	// Filter in-place
	filtered := candidatesWithCommits[:0]

	for i := range candidatesWithCommits {
		if exactPath {
			// TODO - this breaks if the file was renamed in git diff
			pathExists, err := s.lsifstore.GetPathExists(ctx, candidates[i].ID, strings.TrimPrefix(path, candidates[i].Root))
			if err != nil {
				return nil, errors.Wrap(err, "lsifStore.Exists")
			}
			if !pathExists {
				continue
			}
		} else { //nolint:staticcheck
			// TODO(efritz) - ensure there's a valid document path for this condition as well
		}

		filtered = append(filtered, uploadCandidates[i])
	}
	trace.Log(
		traceLog.Int("numFiltered", len(filtered)),
		traceLog.String("filtered", uploadIDsToString(filtered)),
	)

	return filtered, nil
}
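A rough sketch (not part of this commit) of the new transport-layer flow this method enables: the caller resolves the closest dumps through the service and then seeds the request state with them. The wrapper name, indexer value, and the maxIndexes/hunkCacheSize literals are assumptions.

// Illustrative sketch only (not part of this commit).
func resolveDumpsForRequestSketch(ctx context.Context, svc *Service, gitclient shared.GitserverClient, authChecker authz.SubRepoPermissionChecker, repo *types.Repo, repositoryID int, commit, path string) (RequestState, error) {
	dumps, err := svc.GetClosestDumpsForBlob(ctx, repositoryID, commit, path, true, "")
	if err != nil {
		return RequestState{}, err
	}
	return NewRequestState(dumps, authChecker, gitclient, repo, commit, path, 500, 50), nil
}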

// filterUploadsWithCommits removes the uploads for commits which are unknown to gitserver from the given
// slice. The slice is filtered in-place and returned (to update the slice length).
func filterUploadsWithCommits(ctx context.Context, commitCache CommitCache, uploads []shared.Dump) ([]shared.Dump, error) {
	rcs := make([]codeintelgitserver.RepositoryCommit, 0, len(uploads))
	for _, upload := range uploads {
		rcs = append(rcs, codeintelgitserver.RepositoryCommit{
			RepositoryID: upload.RepositoryID,
			Commit: upload.Commit,
		})
	}
	exists, err := commitCache.ExistsBatch(ctx, rcs)
	if err != nil {
		return nil, err
	}

	filtered := uploads[:0]
	for i, upload := range uploads {
		if exists[i] {
			filtered = append(filtered, upload)
		}
	}

	return filtered, nil
}
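Both filtering loops above reuse the input slice's backing array ("filtered := s[:0]") so no new allocation is made; the trade-off is that the original slice contents are overwritten, so callers must not keep reading the unfiltered slice. A generic sketch of the idiom (not part of this commit):

// Illustrative sketch only (not part of this commit).
func keepIf(s []int, pred func(int) bool) []int {
	filtered := s[:0]
	for _, v := range s {
		if pred(v) {
			filtered = append(filtered, v)
		}
	}
	return filtered
}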

func updateSvcDumpToSharedDump(uploadDumps []uploads.Dump) []shared.Dump {
	dumps := make([]shared.Dump, 0, len(uploadDumps))
	for _, d := range uploadDumps {
@@ -5,17 +5,13 @@ import (
	"testing"

	"github.com/google/go-cmp/cmp"
	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/actor"
	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
	codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
	uploadsShared "github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/shared"
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/observation"
	"github.com/sourcegraph/sourcegraph/internal/types"
	"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
@@ -26,19 +22,18 @@ func TestDefinitions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: mockCommit, Root: "sub1/"},
		{ID: 51, Commit: mockCommit, Root: "sub2/"},
		{ID: 52, Commit: mockCommit, Root: "sub3/"},
@@ -66,13 +61,12 @@ func TestDefinitions(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying definitions: %s", err)
	}
	sharedUploads := storeDumpToSymbolDump(uploads)
	expectedLocations := []shared.UploadLocation{
		{Dump: sharedUploads[1], Path: "sub2/a.go", TargetCommit: mockCommit, TargetRange: testRange1},
		{Dump: sharedUploads[1], Path: "sub2/b.go", TargetCommit: mockCommit, TargetRange: testRange2},
		{Dump: sharedUploads[1], Path: "sub2/a.go", TargetCommit: mockCommit, TargetRange: testRange3},
		{Dump: sharedUploads[1], Path: "sub2/b.go", TargetCommit: mockCommit, TargetRange: testRange4},
		{Dump: sharedUploads[1], Path: "sub2/c.go", TargetCommit: mockCommit, TargetRange: testRange5},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: mockCommit, TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: mockCommit, TargetRange: testRange2},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: mockCommit, TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: mockCommit, TargetRange: testRange4},
		{Dump: uploads[1], Path: "sub2/c.go", TargetCommit: mockCommit, TargetRange: testRange5},
	}

	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
@@ -85,19 +79,18 @@ func TestDefinitionsWithSubRepoPermissions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: mockCommit, Root: "sub1/"},
		{ID: 51, Commit: mockCommit, Root: "sub2/"},
		{ID: 52, Commit: mockCommit, Root: "sub3/"},
@@ -139,10 +132,10 @@ func TestDefinitionsWithSubRepoPermissions(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying definitions: %s", err)
	}
	sharedUploads := storeDumpToSymbolDump(uploads)

	expectedLocations := []shared.UploadLocation{
		{Dump: sharedUploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: sharedUploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -154,13 +147,12 @@ func TestDefinitionsRemote(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
@@ -170,7 +162,7 @@ func TestDefinitionsRemote(t *testing.T) {
		t.Fatalf("unexpected error setting local git tree translator: %s", err)
	}
	mockRequestState.GitTreeTranslator = mockedGitTreeTranslator()
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -278,19 +270,18 @@ func TestDefinitionsRemoteWithSubRepoPermissions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{ID: 42}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -6,15 +6,11 @@ import (

	"github.com/google/go-cmp/cmp"

	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/actor"
	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
	codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/observation"
	"github.com/sourcegraph/sourcegraph/internal/types"
	"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
@@ -25,19 +21,18 @@ func TestDiagnostics(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -72,13 +67,13 @@ func TestDiagnostics(t *testing.T) {
	if totalCount != 30 {
		t.Errorf("unexpected count. want=%d have=%d", 30, totalCount)
	}
	u := storeDumpToSymbolDump(uploads)

	expectedDiagnostics := []shared.DiagnosticAtUpload{
		{Dump: u[0], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub1/", DiagnosticData: precise.DiagnosticData{Code: "c1"}}},
		{Dump: u[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c2"}}},
		{Dump: u[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c3"}}},
		{Dump: u[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c4"}}},
		{Dump: u[2], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub3/", DiagnosticData: precise.DiagnosticData{Code: "c5"}}},
		{Dump: uploads[0], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub1/", DiagnosticData: precise.DiagnosticData{Code: "c1"}}},
		{Dump: uploads[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c2"}}},
		{Dump: uploads[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c3"}}},
		{Dump: uploads[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c4"}}},
		{Dump: uploads[2], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub3/", DiagnosticData: precise.DiagnosticData{Code: "c5"}}},
	}
	if diff := cmp.Diff(expectedDiagnostics, adjustedDiagnostics); diff != "" {
		t.Errorf("unexpected diagnostics (-want +got):\n%s", diff)
@@ -98,19 +93,18 @@ func TestDiagnosticsWithSubRepoPermissions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -159,11 +153,11 @@ func TestDiagnosticsWithSubRepoPermissions(t *testing.T) {
	if totalCount != 30 {
		t.Errorf("unexpected count. want=%d have=%d", 30, totalCount)
	}
	u := storeDumpToSymbolDump(uploads)

	expectedDiagnostics := []shared.DiagnosticAtUpload{
		{Dump: u[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c2"}}},
		{Dump: u[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c3"}}},
		{Dump: u[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c4"}}},
		{Dump: uploads[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c2"}}},
		{Dump: uploads[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c3"}}},
		{Dump: uploads[1], AdjustedCommit: "deadbeef", Diagnostic: shared.Diagnostic{Path: "sub2/", DiagnosticData: precise.DiagnosticData{Code: "c4"}}},
	}
	if diff := cmp.Diff(expectedDiagnostics, adjustedDiagnostics); diff != "" {
		t.Errorf("unexpected diagnostics (-want +got):\n%s", diff)
@@ -6,15 +6,10 @@ import (

	"github.com/google/go-cmp/cmp"

	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
	codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
	uploadsShared "github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/shared"
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/observation"
	"github.com/sourcegraph/sourcegraph/internal/types"
	"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
@@ -25,19 +20,18 @@ func TestHover(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{ID: 42}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -81,19 +75,18 @@ func TestHoverRemote(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{ID: 42}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef"},
	}
	mockRequestState.SetUploadsDataLoader(uploads)
@@ -6,17 +6,12 @@ import (

	"github.com/google/go-cmp/cmp"

	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/actor"
	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
	codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
	uploadsShared "github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/shared"
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/observation"
	"github.com/sourcegraph/sourcegraph/internal/types"
	"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
@@ -27,13 +22,12 @@ func TestImplementations(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
@@ -54,7 +48,7 @@ func TestImplementations(t *testing.T) {
	mockLsifStore.GetImplementationLocationsFunc.PushReturn(locations[1:4], 3, nil)
	mockLsifStore.GetImplementationLocationsFunc.PushReturn(locations[4:], 1, nil)

	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -74,13 +68,13 @@ func TestImplementations(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying implementations: %s", err)
	}
	u := storeDumpToSymbolDump(uploads)

	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: u[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -92,13 +86,12 @@ func TestImplementationsWithSubRepoPermissions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
@@ -119,7 +112,7 @@ func TestImplementationsWithSubRepoPermissions(t *testing.T) {
	mockLsifStore.GetImplementationLocationsFunc.PushReturn(locations[1:4], 3, nil)
	mockLsifStore.GetImplementationLocationsFunc.PushReturn(locations[4:], 1, nil)

	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -154,10 +147,10 @@ func TestImplementationsWithSubRepoPermissions(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying implementations: %s", err)
	}
	u := storeDumpToSymbolDump(uploads)

	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -169,19 +162,18 @@ func TestImplementationsRemote(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{ID: 42}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -269,18 +261,17 @@ func TestImplementationsRemote(t *testing.T) {
		t.Fatalf("unexpected error querying references: %s", err)
	}

	u := storeDumpToSymbolDump(uploads)
	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: u[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: u[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: u[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: u[3], Path: "sub4/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: uploads[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[3], Path: "sub4/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -329,19 +320,18 @@ func TestImplementationsRemoteWithSubRepoPermissions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -443,12 +433,12 @@ func TestImplementationsRemoteWithSubRepoPermissions(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying references: %s", err)
	}
	u := storeDumpToSymbolDump(uploads)

	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: u[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -6,13 +6,9 @@ import (

	"github.com/google/go-cmp/cmp"

	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
	codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/observation"
	"github.com/sourcegraph/sourcegraph/internal/types"
)
@@ -22,19 +18,18 @@ func TestRanges(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -75,15 +70,15 @@ func TestRanges(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying ranges: %s", err)
	}
	u := storeDumpToSymbolDump(uploads)
	adjustedLocation1 := shared.UploadLocation{Dump: u[0], Path: "sub1/a.go", TargetCommit: "deadbeef", TargetRange: testRange1}
	adjustedLocation2 := shared.UploadLocation{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2}
	adjustedLocation3 := shared.UploadLocation{Dump: u[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange1}
	adjustedLocation4 := shared.UploadLocation{Dump: u[1], Path: "sub2/d.go", TargetCommit: "deadbeef", TargetRange: testRange2}
	adjustedLocation5 := shared.UploadLocation{Dump: u[1], Path: "sub2/e.go", TargetCommit: "deadbeef", TargetRange: testRange1}
	adjustedLocation6 := shared.UploadLocation{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange2}
	adjustedLocation7 := shared.UploadLocation{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3}
	adjustedLocation8 := shared.UploadLocation{Dump: u[2], Path: "sub3/a.go", TargetCommit: "deadbeef", TargetRange: testRange4}

	adjustedLocation1 := shared.UploadLocation{Dump: uploads[0], Path: "sub1/a.go", TargetCommit: "deadbeef", TargetRange: testRange1}
	adjustedLocation2 := shared.UploadLocation{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2}
	adjustedLocation3 := shared.UploadLocation{Dump: uploads[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange1}
	adjustedLocation4 := shared.UploadLocation{Dump: uploads[1], Path: "sub2/d.go", TargetCommit: "deadbeef", TargetRange: testRange2}
	adjustedLocation5 := shared.UploadLocation{Dump: uploads[1], Path: "sub2/e.go", TargetCommit: "deadbeef", TargetRange: testRange1}
	adjustedLocation6 := shared.UploadLocation{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange2}
	adjustedLocation7 := shared.UploadLocation{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3}
	adjustedLocation8 := shared.UploadLocation{Dump: uploads[2], Path: "sub3/a.go", TargetCommit: "deadbeef", TargetRange: testRange4}

	expectedRanges := []shared.AdjustedCodeIntelligenceRange{
		{Range: testRange1, HoverText: "text1", Definitions: []shared.UploadLocation{}, References: []shared.UploadLocation{adjustedLocation1}, Implementations: []shared.UploadLocation{}},
@@ -6,8 +6,6 @@ import (

	"github.com/google/go-cmp/cmp"

	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/actor"
	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
@@ -15,8 +13,6 @@ import (
	codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
	uploadsShared "github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/shared"
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/observation"
	"github.com/sourcegraph/sourcegraph/internal/types"
	"github.com/sourcegraph/sourcegraph/lib/codeintel/precise"
@@ -38,19 +34,18 @@ func TestReferences(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -85,13 +80,13 @@ func TestReferences(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying references: %s", err)
	}
	u := storeDumpToSymbolDump(uploads)

	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: u[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -103,19 +98,18 @@ func TestReferencesWithSubRepoPermissions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -165,10 +159,9 @@ func TestReferencesWithSubRepoPermissions(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying references: %s", err)
	}
	u := storeDumpToSymbolDump(uploads)
	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -180,19 +173,18 @@ func TestReferencesRemote(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -291,18 +283,17 @@ func TestReferencesRemote(t *testing.T) {
		t.Fatalf("unexpected error querying references: %s", err)
	}

	u := storeDumpToSymbolDump(uploads)
	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: u[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: u[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: u[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: u[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: u[3], Path: "sub4/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[1], Path: "sub2/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[1], Path: "sub2/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
		{Dump: uploads[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange1},
		{Dump: uploads[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[3], Path: "sub4/a.go", TargetCommit: "deadbeef", TargetRange: testRange3},
		{Dump: uploads[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[3], Path: "sub4/c.go", TargetCommit: "deadbeef", TargetRange: testRange5},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@@ -358,23 +349,18 @@ func TestReferencesRemoteWithSubRepoPermissions(t *testing.T) {
	mockStore := NewMockStore()
	mockLsifStore := NewMockLsifStore()
	mockUploadSvc := NewMockUploadService()
	mockLogger := logtest.Scoped(t)
	mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
	mockGitServer := gitserver.NewClient(mockDB)
	mockDBStore := NewMockDBStore()
	mockGitserverClient := NewMockGitserverClient()
	mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)

	// Init service
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
	svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)

	// // Init resolver and set local request context
	// resolver := New(mockSvc, 50, &observation.TestContext)
	// resolver.SetLocalCommitCache(mockGitserverClient)
	// resolver.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath)
	// Set up request state
	mockRequestState := RequestState{}
	mockRequestState.SetLocalCommitCache(mockGitserverClient)
	mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
	uploads := []dbstore.Dump{
	uploads := []shared.Dump{
		{ID: 50, Commit: "deadbeef", Root: "sub1/"},
		{ID: 51, Commit: "deadbeef", Root: "sub2/"},
		{ID: 52, Commit: "deadbeef", Root: "sub3/"},
@@ -478,12 +464,12 @@ func TestReferencesRemoteWithSubRepoPermissions(t *testing.T) {
	if err != nil {
		t.Fatalf("unexpected error querying references: %s", err)
	}
	u := storeDumpToSymbolDump(uploads)

	expectedLocations := []shared.UploadLocation{
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: u[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: u[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[1], Path: "sub2/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
		{Dump: uploads[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange2},
		{Dump: uploads[3], Path: "sub4/b.go", TargetCommit: "deadbeef", TargetRange: testRange4},
	}
	if diff := cmp.Diff(expectedLocations, adjustedLocations); diff != "" {
		t.Errorf("unexpected locations (-want +got):\n%s", diff)
@ -6,13 +6,9 @@ import (
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
|
||||
"github.com/sourcegraph/log/logtest"
|
||||
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore"
|
||||
codeintelgitserver "github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
|
||||
"github.com/sourcegraph/sourcegraph/internal/database"
|
||||
"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
|
||||
"github.com/sourcegraph/sourcegraph/internal/gitserver"
|
||||
"github.com/sourcegraph/sourcegraph/internal/observation"
|
||||
"github.com/sourcegraph/sourcegraph/internal/types"
|
||||
)
|
||||
@ -22,19 +18,18 @@ func TestStencil(t *testing.T) {
|
||||
mockStore := NewMockStore()
|
||||
mockLsifStore := NewMockLsifStore()
|
||||
mockUploadSvc := NewMockUploadService()
|
||||
mockLogger := logtest.Scoped(t)
|
||||
mockDB := database.NewDB(mockLogger, dbtest.NewDB(mockLogger, t))
|
||||
mockGitServer := gitserver.NewClient(mockDB)
|
||||
mockDBStore := NewMockDBStore()
|
||||
mockGitserverClient := NewMockGitserverClient()
|
||||
mockGitServer := codeintelgitserver.New(database.NewMockDB(), mockDBStore, &observation.TestContext)
|
||||
|
||||
// Init service
|
||||
svc := newService(mockStore, mockLsifStore, mockUploadSvc, &observation.TestContext)
|
||||
svc := newService(mockStore, mockLsifStore, mockUploadSvc, mockGitserverClient, &observation.TestContext)
|
||||
|
||||
// Set up request state
|
||||
mockRequestState := RequestState{}
|
||||
mockRequestState.SetLocalCommitCache(mockGitserverClient)
|
||||
mockRequestState.SetLocalGitTreeTranslator(mockGitServer, &types.Repo{}, mockCommit, mockPath, 50)
|
||||
uploads := []dbstore.Dump{
|
||||
uploads := []shared.Dump{
|
||||
{ID: 50, Commit: "deadbeef", Root: "sub1/"},
|
||||
{ID: 51, Commit: "deadbeef", Root: "sub2/"},
|
||||
{ID: 52, Commit: "deadbeef", Root: "sub3/"},
|
||||
|
||||
@ -3,9 +3,14 @@ package shared
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/sourcegraph/go-diff/diff"
|
||||
|
||||
"github.com/sourcegraph/sourcegraph/internal/api"
|
||||
"github.com/sourcegraph/sourcegraph/internal/authz"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
|
||||
)
|
||||
|
||||
type GitserverClient interface {
|
||||
CommitsExist(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error)
|
||||
DiffPath(ctx context.Context, checker authz.SubRepoPermissionChecker, repo api.RepoName, sourceCommit, targetCommit, path string) ([]*diff.Hunk, error)
|
||||
}
|
||||
|
||||
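For illustration only: a minimal hand-written stub that would satisfy the GitserverClient interface above. The generated NewMockGitserverClient used by the tests is the real mechanism; the stub name and its behavior below are assumptions, not part of this change.

// stubGitserverClient is a hypothetical sketch, not part of this commit.
type stubGitserverClient struct{}

// CommitsExist reports every requested commit as present.
func (stubGitserverClient) CommitsExist(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error) {
	exists := make([]bool, len(commits))
	for i := range exists {
		exists[i] = true
	}
	return exists, nil
}

// DiffPath pretends the file is identical at both commits (no hunks).
func (stubGitserverClient) DiffPath(ctx context.Context, checker authz.SubRepoPermissionChecker, repo api.RepoName, sourceCommit, targetCommit, path string) ([]*diff.Hunk, error) {
	return nil, nil
}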
207
internal/codeintel/codenav/transport/graphql/gitblob_resolver.go
Normal file
@ -0,0 +1,207 @@
|
||||
package graphql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/opentracing/opentracing-go/log"
|
||||
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav"
|
||||
"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
|
||||
"github.com/sourcegraph/sourcegraph/internal/observation"
|
||||
"github.com/sourcegraph/sourcegraph/lib/errors"
|
||||
)
|
||||
|
||||
// GitBlobLSIFDataResolver is the main interface to bundle-related operations exposed to the GraphQL API. This
|
||||
// resolver consolidates the logic for bundle operations and is not itself concerned with GraphQL/API
|
||||
// specifics (auth, validation, marshaling, etc.). This resolver is wrapped by a symmetrics resolver
|
||||
// in this package's graphql subpackage, which is exposed directly by the API.
|
||||
type GitBlobLSIFDataResolver interface {
|
||||
LSIFUploads(ctx context.Context) ([]shared.Dump, error)
|
||||
Ranges(ctx context.Context, startLine, endLine int) ([]shared.AdjustedCodeIntelligenceRange, error)
|
||||
Stencil(ctx context.Context) ([]shared.Range, error)
|
||||
Diagnostics(ctx context.Context, limit int) ([]shared.DiagnosticAtUpload, int, error)
|
||||
Hover(ctx context.Context, line, character int) (string, shared.Range, bool, error)
|
||||
Definitions(ctx context.Context, line, character int) ([]shared.UploadLocation, error)
|
||||
References(ctx context.Context, line, character, limit int, rawCursor string) ([]shared.UploadLocation, string, error)
|
||||
Implementations(ctx context.Context, line, character, limit int, rawCursor string) ([]shared.UploadLocation, string, error)
|
||||
}
|
||||
|
||||
type gitBlobLSIFDataResolver struct {
|
||||
svc Service
|
||||
|
||||
repositoryID int
|
||||
commit string
|
||||
path string
|
||||
|
||||
operations *operations
|
||||
|
||||
// codenavResolver CodeNavResolver
|
||||
requestState codenav.RequestState
|
||||
}
|
||||
|
||||
// NewGitBlobLSIFDataResolver create a new query resolver with the given services. The methods of this
|
||||
// struct return queries for the given repository, commit, and path, and will query only the
|
||||
// bundles associated with the given dump objects.
|
||||
func NewGitBlobLSIFDataResolver(svc Service, repositoryID int, commit, path string, operations *operations, requestState codenav.RequestState) GitBlobLSIFDataResolver {
|
||||
return &gitBlobLSIFDataResolver{
|
||||
svc: svc,
|
||||
|
||||
repositoryID: repositoryID,
|
||||
commit: commit,
|
||||
path: path,
|
||||
|
||||
operations: operations,
|
||||
|
||||
requestState: requestState,
|
||||
}
|
||||
}
|
||||
|
||||
// Definitions returns the list of source locations that define the symbol at the given position.
|
||||
func (r *gitBlobLSIFDataResolver) Definitions(ctx context.Context, line, character int) (_ []shared.UploadLocation, err error) {
|
||||
args := shared.RequestArgs{RepositoryID: r.repositoryID, Commit: r.commit, Path: r.path, Line: line, Character: character}
|
||||
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.definitions, time.Second, getObservationArgs(args))
|
||||
defer endObservation()
|
||||
|
||||
def, err := r.svc.GetDefinitions(ctx, args, r.requestState)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "svc.GetDefinitions")
|
||||
}
|
||||
|
||||
return def, nil
|
||||
}
|
||||
|
||||
// Diagnostics returns the diagnostics for documents with the given path prefix.
|
||||
func (r *gitBlobLSIFDataResolver) Diagnostics(ctx context.Context, limit int) (diagnosticsAtUploads []shared.DiagnosticAtUpload, _ int, err error) {
|
||||
args := shared.RequestArgs{RepositoryID: r.repositoryID, Commit: r.commit, Path: r.path, Limit: limit}
|
||||
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.diagnostics, time.Second, getObservationArgs(args))
|
||||
defer endObservation()
|
||||
|
||||
diag, totalCount, err := r.svc.GetDiagnostics(ctx, args, r.requestState)
|
||||
if err != nil {
|
||||
return nil, 0, errors.Wrap(err, "svc.GetDiagnostics")
|
||||
}
|
||||
|
||||
return diag, totalCount, nil
|
||||
}
|
||||
|
||||
// Hover returns the hover text and range for the symbol at the given position.
|
||||
func (r *gitBlobLSIFDataResolver) Hover(ctx context.Context, line, character int) (_ string, _ shared.Range, _ bool, err error) {
|
||||
args := shared.RequestArgs{RepositoryID: r.repositoryID, Commit: r.commit, Path: r.path, Line: line, Character: character}
|
||||
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.hover, time.Second, getObservationArgs(args))
|
||||
defer endObservation()
|
||||
|
||||
hover, rng, ok, err := r.svc.GetHover(ctx, args, r.requestState)
|
||||
if err != nil {
|
||||
return "", shared.Range{}, false, err
|
||||
}
|
||||
|
||||
return hover, rng, ok, err
|
||||
}
|
||||
|
||||
// Implementations returns the list of source locations that define the symbol at the given position.
|
||||
func (r *gitBlobLSIFDataResolver) Implementations(ctx context.Context, line, character int, limit int, rawCursor string) (_ []shared.UploadLocation, nextCursor string, err error) {
|
||||
args := shared.RequestArgs{RepositoryID: r.repositoryID, Commit: r.commit, Path: r.path, Line: line, Character: character, Limit: limit, RawCursor: rawCursor}
|
||||
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.implementations, time.Second, getObservationArgs(args))
|
||||
defer endObservation()
|
||||
|
||||
// Decode cursor given from previous response or create a new one with default values.
|
||||
// We use the cursor state track offsets with the result set and cache initial data that
|
||||
// is used to resolve each page. This cursor will be modified in-place to become the
|
||||
// cursor used to fetch the subsequent page of results in this result set.
|
||||
cursor, err := decodeImplementationsCursor(rawCursor)
|
||||
if err != nil {
|
||||
return nil, "", errors.Wrap(err, fmt.Sprintf("invalid cursor: %q", rawCursor))
|
||||
}
|
||||
|
||||
impls, implsCursor, err := r.svc.GetImplementations(ctx, args, r.requestState, cursor)
|
||||
if err != nil {
|
||||
return nil, "", errors.Wrap(err, "svc.GetImplementations")
|
||||
}
|
||||
|
||||
if implsCursor.Phase != "done" {
|
||||
nextCursor = encodeImplementationsCursor(implsCursor)
|
||||
}
|
||||
|
||||
return impls, nextCursor, nil
|
||||
}
|
||||
|
||||
// LSIFUploads returns the list of dbstore.Uploads for the store.Dumps determined to be applicable
|
||||
// for answering code-intel queries.
|
||||
func (r *gitBlobLSIFDataResolver) LSIFUploads(ctx context.Context) (uploads []shared.Dump, err error) {
|
||||
cacheUploads := r.requestState.GetCacheUploads()
|
||||
ids := make([]int, 0, len(cacheUploads))
|
||||
for _, dump := range cacheUploads {
|
||||
ids = append(ids, dump.ID)
|
||||
}
|
||||
|
||||
dumps, err := r.svc.GetDumpsByIDs(ctx, ids)
|
||||
|
||||
return dumps, err
|
||||
}
|
||||
|
||||
// Ranges returns code intelligence for the ranges that fall within the given range of lines. These
|
||||
// results are partial and do not include references outside the current file, or any location that
|
||||
// requires cross-linking of bundles (cross-repo or cross-root).
|
||||
func (r *gitBlobLSIFDataResolver) Ranges(ctx context.Context, startLine, endLine int) (adjustedRanges []shared.AdjustedCodeIntelligenceRange, err error) {
|
||||
args := shared.RequestArgs{RepositoryID: r.repositoryID, Commit: r.commit, Path: r.path}
|
||||
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.ranges, time.Second, observation.Args{
|
||||
LogFields: []log.Field{
|
||||
log.Int("repositoryID", args.RepositoryID),
|
||||
log.String("commit", args.Commit),
|
||||
log.String("path", args.Path),
|
||||
log.Int("startLine", startLine),
|
||||
log.Int("endLine", endLine),
|
||||
},
|
||||
})
|
||||
defer endObservation()
|
||||
|
||||
rng, err := r.svc.GetRanges(ctx, args, r.requestState, startLine, endLine)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return rng, nil
|
||||
}
|
||||
|
||||
// References returns the list of source locations that reference the symbol at the given position.
|
||||
func (r *gitBlobLSIFDataResolver) References(ctx context.Context, line, character, limit int, rawCursor string) (_ []shared.UploadLocation, nextCursor string, err error) {
|
||||
args := shared.RequestArgs{RepositoryID: r.repositoryID, Commit: r.commit, Path: r.path, Line: line, Character: character, Limit: limit, RawCursor: rawCursor}
|
||||
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.references, time.Second, getObservationArgs(args))
|
||||
defer endObservation()
|
||||
|
||||
// Decode cursor given from previous response or create a new one with default values.
|
||||
// We use the cursor state track offsets with the result set and cache initial data that
|
||||
// is used to resolve each page. This cursor will be modified in-place to become the
|
||||
// cursor used to fetch the subsequent page of results in this result set.
|
||||
cursor, err := decodeReferencesCursor(args.RawCursor)
|
||||
if err != nil {
|
||||
return nil, "", errors.Wrap(err, fmt.Sprintf("invalid cursor: %q", args.RawCursor))
|
||||
}
|
||||
|
||||
refs, refCursor, err := r.svc.GetReferences(ctx, args, r.requestState, cursor)
|
||||
if err != nil {
|
||||
return nil, "", errors.Wrap(err, "svc.GetReferences")
|
||||
}
|
||||
|
||||
if refCursor.Phase != "done" {
|
||||
nextCursor = encodeReferencesCursor(refCursor)
|
||||
}
|
||||
|
||||
return refs, nextCursor, nil
|
||||
}
|
||||
|
||||
// Stencil returns all ranges within a single document.
|
||||
func (r *gitBlobLSIFDataResolver) Stencil(ctx context.Context) (adjustedRanges []shared.Range, err error) {
|
||||
args := shared.RequestArgs{RepositoryID: r.repositoryID, Commit: r.commit, Path: r.path}
|
||||
ctx, _, endObservation := observeResolver(ctx, &err, r.operations.stencil, time.Second, getObservationArgs(args))
|
||||
defer endObservation()
|
||||
|
||||
st, err := r.svc.GetStencil(ctx, args, r.requestState)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "svc.GetStencil")
|
||||
}
|
||||
|
||||
return st, nil
|
||||
}
|
||||
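The References and Implementations resolvers above page through results with an opaque cursor: a caller keeps feeding the returned cursor back until it comes back empty. A hedged consumer sketch (the helper name, loop shape, and page size are illustrative assumptions, not part of this change):

// allReferences drains every page of reference locations from a GitBlobLSIFDataResolver.
func allReferences(ctx context.Context, r GitBlobLSIFDataResolver, line, character, pageSize int) ([]shared.UploadLocation, error) {
	var out []shared.UploadLocation
	cursor := "" // an empty cursor requests the first page
	for {
		page, next, err := r.References(ctx, line, character, pageSize, cursor)
		if err != nil {
			return nil, err
		}
		out = append(out, page...)
		if next == "" { // the resolver returns an empty cursor once the phase is "done"
			return out, nil
		}
		cursor = next
	}
}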
@ -3,8 +3,13 @@ package graphql
import (
	"context"

	"github.com/sourcegraph/go-diff/diff"

	"github.com/sourcegraph/sourcegraph/internal/api"
	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
)

type Service interface {
@ -18,4 +23,10 @@ type Service interface {

	// Uploads Service
	GetDumpsByIDs(ctx context.Context, ids []int) (_ []shared.Dump, err error)
	GetClosestDumpsForBlob(ctx context.Context, repositoryID int, commit, path string, exactPath bool, indexer string) (_ []shared.Dump, err error)
}

type GitserverClient interface {
	CommitsExist(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error)
	DiffPath(ctx context.Context, checker authz.SubRepoPermissionChecker, repo api.RepoName, sourceCommit, targetCommit, path string) ([]*diff.Hunk, error)
}

@ -22,6 +22,8 @@ type operations struct {
	diagnostics *observation.Operation
	stencil     *observation.Operation
	ranges      *observation.Operation

	getGitBlobLSIFDataResolver *observation.Operation
}

func newOperations(observationContext *observation.Context) *operations {
@ -50,6 +52,8 @@ func newOperations(observationContext *observation.Context) *operations {
	diagnostics: op("Diagnostics"),
	stencil:     op("Stencil"),
	ranges:      op("Ranges"),

	getGitBlobLSIFDataResolver: op("GetGitBlobLSIFDataResolver"),
	}
}

@ -2,188 +2,61 @@ package graphql

import (
	"context"
	"fmt"
	"time"

	"github.com/opentracing/opentracing-go/log"

	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/shared"
	"github.com/sourcegraph/sourcegraph/internal/observation"
	"github.com/sourcegraph/sourcegraph/lib/errors"
	"github.com/sourcegraph/sourcegraph/internal/types"
)

type Resolver interface {
	Definitions(ctx context.Context, args shared.RequestArgs) (_ []shared.UploadLocation, err error)
	Diagnostics(ctx context.Context, args shared.RequestArgs) (diagnosticsAtUploads []shared.DiagnosticAtUpload, _ int, err error)
	Hover(ctx context.Context, args shared.RequestArgs) (_ string, _ shared.Range, _ bool, err error)
	Implementations(ctx context.Context, args shared.RequestArgs) (_ []shared.UploadLocation, _ string, err error)
	Ranges(ctx context.Context, args shared.RequestArgs, startLine, endLine int) (adjustedRanges []shared.AdjustedCodeIntelligenceRange, err error)
	References(ctx context.Context, args shared.RequestArgs) (_ []shared.UploadLocation, _ string, err error)
	Stencil(ctx context.Context, args shared.RequestArgs) (adjustedRanges []shared.Range, err error)

	GetHunkCacheSize() int
	GitBlobLSIFDataResolverFactory(ctx context.Context, repo *types.Repo, commit, path, toolName string, exactPath bool) (_ GitBlobLSIFDataResolver, err error)
}

type resolver struct {
	svc Service

	// Local Request Caches
	hunkCacheSize int
	svc                            Service
	gitserver                      GitserverClient
	maximumIndexesPerMonikerSearch int
	hunkCacheSize                  int

	// Metrics
	operations *operations
}

func New(svc Service, hunkCacheSize int, observationContext *observation.Context) *resolver {
func New(svc Service, gitserver GitserverClient, maxIndexSearch, hunkCacheSize int, observationContext *observation.Context) Resolver {
	return &resolver{
		svc:           svc,
		operations:    newOperations(observationContext),
		hunkCacheSize: hunkCacheSize,
		svc:                            svc,
		gitserver:                      gitserver,
		operations:                     newOperations(observationContext),
		hunkCacheSize:                  hunkCacheSize,
		maximumIndexesPerMonikerSearch: maxIndexSearch,
	}
}

func (r *resolver) GetHunkCacheSize() int {
	return r.hunkCacheSize
}
const slowQueryResolverRequestThreshold = time.Second

// Definitions returns the list of source locations that define the symbol at the given position.
func (r *resolver) Definitions(ctx context.Context, args shared.RequestArgs, requestState codenav.RequestState) (_ []shared.UploadLocation, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.definitions, time.Second, getObservationArgs(args))
	defer endObservation()

	def, err := r.svc.GetDefinitions(ctx, args, requestState)
	if err != nil {
		return nil, errors.Wrap(err, "svc.GetDefinitions")
	}

	return def, nil
}

// Diagnostics returns the diagnostics for documents with the given path prefix.
func (r *resolver) Diagnostics(ctx context.Context, args shared.RequestArgs, requestState codenav.RequestState) (diagnosticsAtUploads []shared.DiagnosticAtUpload, _ int, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.diagnostics, time.Second, getObservationArgs(args))
	defer endObservation()

	diag, totalCount, err := r.svc.GetDiagnostics(ctx, args, requestState)
	if err != nil {
		return nil, 0, errors.Wrap(err, "svc.GetDiagnostics")
	}

	return diag, totalCount, nil
}

// Hover returns the hover text and range for the symbol at the given position.
func (r *resolver) Hover(ctx context.Context, args shared.RequestArgs, requestState codenav.RequestState) (_ string, _ shared.Range, _ bool, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.hover, time.Second, getObservationArgs(args))
	defer endObservation()

	hover, rng, ok, err := r.svc.GetHover(ctx, args, requestState)
	if err != nil {
		return "", shared.Range{}, false, err
	}

	return hover, rng, ok, err
}

// Implementations returns the list of source locations that implement the symbol at the given position.
func (r *resolver) Implementations(ctx context.Context, args shared.RequestArgs, requestState codenav.RequestState) (_ []shared.UploadLocation, nextCursor string, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.implementations, time.Second, getObservationArgs(args))
	defer endObservation()

	// Decode cursor given from previous response or create a new one with default values.
	// We use the cursor state to track offsets within the result set and cache initial data that
	// is used to resolve each page. This cursor will be modified in-place to become the
	// cursor used to fetch the subsequent page of results in this result set.
	cursor, err := decodeImplementationsCursor(args.RawCursor)
	if err != nil {
		return nil, "", errors.Wrap(err, fmt.Sprintf("invalid cursor: %q", args.RawCursor))
	}

	impls, implsCursor, err := r.svc.GetImplementations(ctx, args, requestState, cursor)
	if err != nil {
		return nil, "", errors.Wrap(err, "svc.GetImplementations")
	}

	if implsCursor.Phase != "done" {
		nextCursor = encodeImplementationsCursor(implsCursor)
	}

	return impls, nextCursor, nil
}

// LSIFUploads returns the list of dbstore.Uploads for the store.Dumps determined to be applicable
// for answering code-intel queries.
func (r *resolver) LSIFUploads(ctx context.Context, requestState codenav.RequestState) (uploads []shared.Dump, err error) {
	cacheUploads := requestState.GetCacheUploads()
	ids := make([]int, 0, len(cacheUploads))
	for _, dump := range cacheUploads {
		ids = append(ids, dump.ID)
	}

	dumps, err := r.svc.GetDumpsByIDs(ctx, ids)

	return dumps, err
}

// Ranges returns code intelligence for the ranges that fall within the given range of lines. These
// results are partial and do not include references outside the current file, or any location that
// requires cross-linking of bundles (cross-repo or cross-root).
func (r *resolver) Ranges(ctx context.Context, args shared.RequestArgs, requestState codenav.RequestState, startLine, endLine int) (adjustedRanges []shared.AdjustedCodeIntelligenceRange, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.ranges, time.Second, observation.Args{
func (r *resolver) GitBlobLSIFDataResolverFactory(ctx context.Context, repo *types.Repo, commit, path, toolName string, exactPath bool) (_ GitBlobLSIFDataResolver, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.getGitBlobLSIFDataResolver, slowQueryResolverRequestThreshold, observation.Args{
		LogFields: []log.Field{
			log.Int("repositoryID", args.RepositoryID),
			log.String("commit", args.Commit),
			log.String("path", args.Path),
			log.Int("startLine", startLine),
			log.Int("endLine", endLine),
			log.Int("repositoryID", int(repo.ID)),
			log.String("commit", commit),
			log.String("path", path),
			log.Bool("exactPath", exactPath),
			log.String("indexer", toolName),
		},
	})
	defer endObservation()

	rng, err := r.svc.GetRanges(ctx, args, requestState, startLine, endLine)
	if err != nil {
	uploads, err := r.svc.GetClosestDumpsForBlob(ctx, int(repo.ID), commit, path, exactPath, toolName)
	if err != nil || len(uploads) == 0 {
		return nil, err
	}

	return rng, nil
}

// References returns the list of source locations that reference the symbol at the given position.
func (r *resolver) References(ctx context.Context, args shared.RequestArgs, requestState codenav.RequestState) (_ []shared.UploadLocation, nextCursor string, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.references, time.Second, getObservationArgs(args))
	defer endObservation()

	// Decode cursor given from previous response or create a new one with default values.
	// We use the cursor state to track offsets within the result set and cache initial data that
	// is used to resolve each page. This cursor will be modified in-place to become the
	// cursor used to fetch the subsequent page of results in this result set.
	cursor, err := decodeReferencesCursor(args.RawCursor)
	if err != nil {
		return nil, "", errors.Wrap(err, fmt.Sprintf("invalid cursor: %q", args.RawCursor))
	}

	refs, refCursor, err := r.svc.GetReferences(ctx, args, requestState, cursor)
	if err != nil {
		return nil, "", errors.Wrap(err, "svc.GetReferences")
	}

	if refCursor.Phase != "done" {
		nextCursor = encodeReferencesCursor(refCursor)
	}

	return refs, nextCursor, nil
}

// Stencil returns all ranges within a single document.
func (r *resolver) Stencil(ctx context.Context, args shared.RequestArgs, requestState codenav.RequestState) (adjustedRanges []shared.Range, err error) {
	ctx, _, endObservation := observeResolver(ctx, &err, r.operations.stencil, time.Second, getObservationArgs(args))
	defer endObservation()

	st, err := r.svc.GetStencil(ctx, args, requestState)
	if err != nil {
		return nil, errors.Wrap(err, "svc.GetStencil")
	}

	return st, nil
	reqState := codenav.NewRequestState(uploads, authz.DefaultSubRepoPermsChecker, r.gitserver, repo, commit, path, r.maximumIndexesPerMonikerSearch, r.hunkCacheSize)
	gbr := NewGitBlobLSIFDataResolver(r.svc, int(repo.ID), commit, path, r.operations, reqState)

	return gbr, nil
}

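A hedged sketch of how an outer GraphQL resolver might consume the new factory above. The enclosing queryResolver type and its codenav field are assumptions for illustration; the nil result when no dumps are visible is taken from the factory code itself.

// Illustrative only: the enclosing queryResolver type is hypothetical.
func (q *queryResolver) gitBlobLSIFData(ctx context.Context, repo *types.Repo, commit, path, toolName string, exactPath bool) (GitBlobLSIFDataResolver, error) {
	blobResolver, err := q.codenav.GitBlobLSIFDataResolverFactory(ctx, repo, commit, path, toolName, exactPath)
	if err != nil {
		return nil, err
	}
	if blobResolver == nil {
		// No uploads are visible for this blob; the GraphQL field resolves to null.
		return nil, nil
	}
	return blobResolver, nil
}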
@ -9,6 +9,8 @@ import (
	"github.com/grafana/regexp"
	"github.com/opentracing/opentracing-go/log"

	"github.com/sourcegraph/go-diff/diff"

	"github.com/sourcegraph/sourcegraph/internal/api"
	"github.com/sourcegraph/sourcegraph/internal/authz"
	"github.com/sourcegraph/sourcegraph/internal/database"
@ -32,6 +34,10 @@ func New(db database.DB, dbStore DBStore, observationContext *observation.Contex
	}
}

func (c *Client) DiffPath(ctx context.Context, checker authz.SubRepoPermissionChecker, repo api.RepoName, sourceCommit, targetCommit, path string) ([]*diff.Hunk, error) {
	return gitserver.NewClient(c.db).DiffPath(ctx, checker, repo, sourceCommit, targetCommit, path)
}

// CommitExists determines if the given commit exists in the given repository.
func (c *Client) CommitExists(ctx context.Context, repositoryID int, commit string) (_ bool, err error) {
	ctx, _, endObservation := c.operations.commitExists.With(ctx, &err, observation.Args{LogFields: []log.Field{

@ -3,9 +3,14 @@ package uploads
import (
	"context"

	"github.com/sourcegraph/sourcegraph/internal/codeintel/stores/gitserver"
	"github.com/sourcegraph/sourcegraph/internal/database/locker"
)

type Locker interface {
	Lock(ctx context.Context, key int32, blocking bool) (bool, locker.UnlockFunc, error)
}

type CommitCache interface {
	ExistsBatch(ctx context.Context, commits []gitserver.RepositoryCommit) ([]bool, error)
}

@ -17,13 +17,16 @@ type operations struct {
	updateSourcedCommits      *observation.Operation
	getCommitsVisibleToUpload *observation.Operation
	getOldestCommitDate       *observation.Operation
	hasCommit                 *observation.Operation

	// Repositories
	getRepositoriesMaxStaleAge      *observation.Operation
	setRepositoryAsDirty            *observation.Operation
	setRepositoryAsDirtyWithTx      *observation.Operation
	getDirtyRepositories            *observation.Operation
	repoName                        *observation.Operation
	setRepositoriesForRetentionScan *observation.Operation
	hasRepository                   *observation.Operation

	// Uploads
	getUploads *observation.Operation

@ -82,13 +85,16 @@ func newOperations(observationContext *observation.Context) *operations {
	getStaleSourcedCommits: op("GetStaleSourcedCommits"),
	deleteSourcedCommits:   op("DeleteSourcedCommits"),
	updateSourcedCommits:   op("UpdateSourcedCommits"),
	hasCommit:              op("HasCommit"),

	// Repositories
	getRepositoriesMaxStaleAge:      op("GetRepositoriesMaxStaleAge"),
	getDirtyRepositories:            op("GetDirtyRepositories"),
	setRepositoryAsDirty:            op("SetRepositoryAsDirty"),
	setRepositoryAsDirtyWithTx:      op("SetRepositoryAsDirtyWithTx"),
	repoName:                        op("RepoName"),
	setRepositoriesForRetentionScan: op("SetRepositoriesForRetentionScan"),
	hasRepository:                   op("HasRepository"),

	// Uploads
	getUploads: op("GetUploads"),

@ -28,14 +28,16 @@ type Store interface {
	GetStaleSourcedCommits(ctx context.Context, minimumTimeSinceLastCheck time.Duration, limit int, now time.Time) (_ []shared.SourcedCommits, err error)
	UpdateSourcedCommits(ctx context.Context, repositoryID int, commit string, now time.Time) (uploadsUpdated int, err error)
	DeleteSourcedCommits(ctx context.Context, repositoryID int, commit string, maximumCommitLag time.Duration, now time.Time) (uploadsUpdated int, uploadsDeleted int, err error)
	HasCommit(ctx context.Context, repositoryID int, commit string) (_ bool, err error)

	// Repositories
	GetRepositoriesMaxStaleAge(ctx context.Context) (_ time.Duration, err error)
	SetRepositoryAsDirty(ctx context.Context, repositoryID int, tx *basestore.Store) (err error)
	SetRepositoryAsDirty(ctx context.Context, repositoryID int) (err error)
	GetDirtyRepositories(ctx context.Context) (_ map[int]int, err error)
	RepoName(ctx context.Context, repositoryID int) (_ string, err error) // TODO(numbers88s): renaming this after I remove dbStore from gitserver init.
	RepoNames(ctx context.Context, repositoryIDs ...int) (_ map[int]string, err error) // TODO(numbers88s): renaming this after I remove dbStore from gitserver init.
	SetRepositoriesForRetentionScan(ctx context.Context, processDelay time.Duration, limit int) (_ []int, err error)
	HasRepository(ctx context.Context, repositoryID int) (_ bool, err error)

	// Uploads
	GetUploads(ctx context.Context, opts shared.GetUploadsOptions) (_ []shared.Upload, _ int, err error)

@ -10,6 +10,7 @@ import (

	"github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/shared"
	"github.com/sourcegraph/sourcegraph/internal/database/basestore"
	"github.com/sourcegraph/sourcegraph/internal/database/dbutil"
	"github.com/sourcegraph/sourcegraph/internal/observation"
)

@ -269,3 +270,30 @@ const getOldestCommitDateQuery = `
-- source: internal/codeintel/uploads/internal/store/store_commits.go:GetOldestCommitDate
SELECT committed_at FROM lsif_uploads WHERE repository_id = %s AND state = 'completed' AND committed_at IS NOT NULL AND committed_at != '-infinity' ORDER BY committed_at LIMIT 1
`

// HasCommit determines if the given commit is known for the given repository.
func (s *store) HasCommit(ctx context.Context, repositoryID int, commit string) (_ bool, err error) {
	ctx, _, endObservation := s.operations.hasCommit.With(ctx, &err, observation.Args{LogFields: []log.Field{
		log.Int("repositoryID", repositoryID),
		log.String("commit", commit),
	}})
	defer endObservation(1, observation.Args{})

	count, _, err := basestore.ScanFirstInt(s.db.Query(
		ctx,
		sqlf.Sprintf(
			hasCommitQuery,
			repositoryID, dbutil.CommitBytea(commit),
			repositoryID, dbutil.CommitBytea(commit),
		),
	))

	return count > 0, err
}

const hasCommitQuery = `
-- source: internal/codeintel/stores/dbstore/commits.go:HasCommit
SELECT
	(SELECT COUNT(*) FROM lsif_nearest_uploads WHERE repository_id = %s AND commit_bytea = %s) +
	(SELECT COUNT(*) FROM lsif_nearest_uploads_links WHERE repository_id = %s AND commit_bytea = %s)
`

@ -63,7 +63,17 @@ RETURNING repository_id
`

// SetRepositoryAsDirty marks the given repository's commit graph as out of date.
func (s *store) SetRepositoryAsDirty(ctx context.Context, repositoryID int, tx *basestore.Store) (err error) {
func (s *store) SetRepositoryAsDirty(ctx context.Context, repositoryID int) (err error) {
	ctx, _, endObservation := s.operations.setRepositoryAsDirty.With(ctx, &err, observation.Args{LogFields: []log.Field{
		log.Int("repositoryID", repositoryID),
	}})
	defer endObservation(1, observation.Args{})

	return s.db.Exec(ctx, sqlf.Sprintf(setRepositoryAsDirtyQuery, repositoryID))
}

// SetRepositoryAsDirtyWithTx marks the given repository's commit graph as out of date.
func (s *store) setRepositoryAsDirtyWithTx(ctx context.Context, repositoryID int, tx *basestore.Store) (err error) {
	ctx, _, endObservation := s.operations.setRepositoryAsDirty.With(ctx, &err, observation.Args{LogFields: []log.Field{
		log.Int("repositoryID", repositoryID),
	}})
@ -178,3 +188,19 @@ const repoNamesQuery = `
-- source: internal/codeintel/uploads/internal/store/store_repositories.go:RepoNames
SELECT id, name FROM repo WHERE id = ANY(%s)
`

// HasRepository determines if there is LSIF data for the given repository.
func (s *store) HasRepository(ctx context.Context, repositoryID int) (_ bool, err error) {
	ctx, _, endObservation := s.operations.hasRepository.With(ctx, &err, observation.Args{LogFields: []log.Field{
		log.Int("repositoryID", repositoryID),
	}})
	defer endObservation(1, observation.Args{})

	_, found, err := basestore.ScanFirstInt(s.db.Query(ctx, sqlf.Sprintf(hasRepositoryQuery, repositoryID)))
	return found, err
}

const hasRepositoryQuery = `
-- source: internal/codeintel/stores/dbstore/commits.go:HasRepository
SELECT 1 FROM lsif_uploads WHERE state NOT IN ('deleted', 'deleting') AND repository_id = %s LIMIT 1
`

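To illustrate the split introduced above, a hedged sketch of the two call sites (the surrounding error handling is assumed; only the method names and signatures come from the diff):

// External callers mark a repository dirty without threading a transaction handle:
if err := store.SetRepositoryAsDirty(ctx, repositoryID); err != nil {
	return err
}

// Store-internal code that already runs inside a transaction uses the unexported
// variant and passes its own tx, as SoftDeleteExpiredUploads does below:
if err := s.setRepositoryAsDirtyWithTx(ctx, repositoryID, tx); err != nil {
	return err
}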
@ -10,7 +10,6 @@ import (
	"github.com/sourcegraph/log/logtest"

	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/database/basestore"
	"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
	"github.com/sourcegraph/sourcegraph/internal/observation"
)
@ -19,14 +18,13 @@ func TestSetRepositoryAsDirty(t *testing.T) {
	logger := logtest.Scoped(t)
	db := database.NewDB(logger, dbtest.NewDB(logger, t))
	store := New(db, &observation.TestContext)
	tx := basestore.NewWithHandle(db.Handle())

	for _, id := range []int{50, 51, 52} {
		insertRepo(t, db, id, "")
	}

	for _, repositoryID := range []int{50, 51, 52, 51, 52} {
		if err := store.SetRepositoryAsDirty(context.Background(), repositoryID, tx); err != nil {
		if err := store.SetRepositoryAsDirty(context.Background(), repositoryID); err != nil {
			t.Errorf("unexpected error marking repository as dirty: %s", err)
		}
	}

@ -331,7 +331,7 @@ func (s *store) SoftDeleteExpiredUploads(ctx context.Context) (count int, err er
	)

	for repositoryID := range repositories {
		if err := s.SetRepositoryAsDirty(ctx, repositoryID, tx); err != nil {
		if err := s.setRepositoryAsDirtyWithTx(ctx, repositoryID, tx); err != nil {
			return 0, err
		}
	}

@ -41,6 +41,7 @@ type operations struct {
	deleteUploadsWithoutRepository *observation.Operation
	deleteUploadsStuckUploading    *observation.Operation
	hardDeleteUploads              *observation.Operation
	inferClosestUploads            *observation.Operation

	// Dumps
	findClosestDumps *observation.Operation
@ -107,6 +108,7 @@ func newOperations(observationContext *observation.Context) *operations {
	deleteUploadsStuckUploading: op("DeleteUploadsStuckUploading"),
	softDeleteExpiredUploads:    op("SoftDeleteExpiredUploads"),
	hardDeleteUploads:           op("HardDeleteUploads"),
	inferClosestUploads:         op("InferClosestUploads"),

	// Dumps
	findClosestDumps: op("FindClosestDumps"),

@ -15,7 +15,7 @@ import (
	"github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/internal/lsifstore"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/internal/store"
	"github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/shared"
	"github.com/sourcegraph/sourcegraph/internal/database/basestore"
	"github.com/sourcegraph/sourcegraph/internal/gitserver"
	gitserverOptions "github.com/sourcegraph/sourcegraph/internal/gitserver"

	"github.com/sourcegraph/sourcegraph/internal/gitserver/gitdomain"
@ -45,7 +45,7 @@ type service interface {
	// Repositories
	GetRepositoriesMaxStaleAge(ctx context.Context) (_ time.Duration, err error)
	GetDirtyRepositories(ctx context.Context) (_ map[int]int, err error)
	SetRepositoryAsDirty(ctx context.Context, repositoryID int, tx *basestore.Store) (err error)
	SetRepositoryAsDirty(ctx context.Context, repositoryID int) (err error)
	UpdateDirtyRepositories(ctx context.Context, maxAgeForNonStaleBranches time.Duration, maxAgeForNonStaleTags time.Duration) (err error)
	SetRepositoriesForRetentionScan(ctx context.Context, processDelay time.Duration, limit int) (_ []int, err error)

@ -59,6 +59,7 @@ type service interface {
	HardDeleteExpiredUploads(ctx context.Context) (count int, err error)
	DeleteUploadsStuckUploading(ctx context.Context, uploadedBefore time.Time) (_ int, err error)
	DeleteUploadsWithoutRepository(ctx context.Context, now time.Time) (_ map[int]int, err error)
	InferClosestUploads(ctx context.Context, repositoryID int, commit, path string, exactPath bool, indexer string) ([]shared.Dump, error)

	// Dumps
	FindClosestDumps(ctx context.Context, repositoryID int, commit, path string, rootMustEnclosePath bool, indexer string) (_ []shared.Dump, err error)
@ -191,11 +192,11 @@ func (s *Service) GetOldestCommitDate(ctx context.Context, repositoryID int) (ti
	return s.store.GetOldestCommitDate(ctx, repositoryID)
}

func (s *Service) SetRepositoryAsDirty(ctx context.Context, repositoryID int, tx *basestore.Store) (err error) {
func (s *Service) SetRepositoryAsDirty(ctx context.Context, repositoryID int) (err error) {
	ctx, _, endObservation := s.operations.setRepositoryAsDirty.With(ctx, &err, observation.Args{})
	defer endObservation(1, observation.Args{})

	return s.store.SetRepositoryAsDirty(ctx, repositoryID, tx)
	return s.store.SetRepositoryAsDirty(ctx, repositoryID)
}

func (s *Service) UpdateDirtyRepositories(ctx context.Context, maxAgeForNonStaleBranches time.Duration, maxAgeForNonStaleTags time.Duration) (err error) {
@ -389,6 +390,79 @@ func (s *Service) DeleteUploadsWithoutRepository(ctx context.Context, now time.T
	return s.store.DeleteUploadsWithoutRepository(ctx, now)
}

// numAncestors is the number of ancestors to query from gitserver when trying to find the closest
// ancestor we have data for. Setting this value too low (relative to a repository's commit rate)
// will cause requests for an unknown commit to return too few results; setting this value too high
// will raise the latency of requests for an unknown commit.
//
// TODO(efritz) - make adjustable via site configuration
const numAncestors = 100

// InferClosestUploads will return the set of visible uploads for the given commit. If this commit is
// newer than our last refresh of the lsif_nearest_uploads table for this repository, then we will mark
// the repository as dirty and quickly approximate the correct set of visible uploads.
//
// Because updating the entire commit graph is a blocking, expensive, and lock-guarded process, we want
// to only do that in the background and do something cheap in latency-sensitive paths. To construct an
// approximate result, we query gitserver for a (relatively small) set of ancestors for the given commit,
// correlate that with the upload data we have for those commits, and re-run the visibility algorithm over
// the graph. This will not always produce the full set of visible commits - some responses may not contain
// all results while a subsequent request made after the lsif_nearest_uploads has been updated to include
// this commit will.
//
func (s *Service) InferClosestUploads(ctx context.Context, repositoryID int, commit, path string, exactPath bool, indexer string) (_ []shared.Dump, err error) {
	ctx, _, endObservation := s.operations.inferClosestUploads.With(ctx, &err, observation.Args{})
	defer endObservation(1, observation.Args{})

	// The parameters exactPath and rootMustEnclosePath align here: if we're looking for dumps
	// that can answer queries for a directory (e.g. diagnostics), we want any dump that happens
	// to intersect the target directory. If we're looking for dumps that can answer queries for
	// a single file, then we need a dump with a root that properly encloses that file.
	if dumps, err := s.store.FindClosestDumps(ctx, repositoryID, commit, path, exactPath, indexer); err != nil {
		return nil, errors.Wrap(err, "store.FindClosestDumps")
	} else if len(dumps) != 0 {
		return dumps, nil
	}

	// Repository has no LSIF data at all
	if repositoryExists, err := s.store.HasRepository(ctx, repositoryID); err != nil {
		return nil, errors.Wrap(err, "dbstore.HasRepository")
	} else if !repositoryExists {
		return nil, nil
	}

	// Commit is known and the empty dumps list explicitly means nothing is visible
	if commitExists, err := s.store.HasCommit(ctx, repositoryID, commit); err != nil {
		return nil, errors.Wrap(err, "dbstore.HasCommit")
	} else if commitExists {
		return nil, nil
	}

	// Otherwise, the repository has LSIF data but we don't know about the commit. This commit
	// is probably newer than our last upload. Pull back a portion of the updated commit graph
	// and try to link it with what we have in the database. Then mark the repository's commit
	// graph as dirty so it's updated for subsequent requests.

	graph, err := s.gitserverClient.CommitGraph(ctx, repositoryID, gitserver.CommitGraphOptions{
		Commit: commit,
		Limit:  numAncestors,
	})
	if err != nil {
		return nil, errors.Wrap(err, "gitserverClient.CommitGraph")
	}

	dumps, err := s.store.FindClosestDumpsFromGraphFragment(ctx, repositoryID, commit, path, exactPath, indexer, graph)
	if err != nil {
		return nil, errors.Wrap(err, "dbstore.FindClosestDumpsFromGraphFragment")
	}

	if err := s.store.SetRepositoryAsDirty(ctx, repositoryID); err != nil {
		return nil, errors.Wrap(err, "dbstore.MarkRepositoryAsDirty")
	}

	return dumps, nil
}

func (s *Service) FindClosestDumps(ctx context.Context, repositoryID int, commit, path string, rootMustEnclosePath bool, indexer string) (_ []shared.Dump, err error) {
	ctx, _, endObservation := s.operations.findClosestDumps.With(ctx, &err, observation.Args{})
	defer endObservation(1, observation.Args{})

@ -1166,7 +1166,6 @@ func (c *clientImplementor) do(ctx context.Context, repo api.RepoName, method, u

func (c *clientImplementor) CreateCommitFromPatch(ctx context.Context, req protocol.CreateCommitFromPatchRequest) (string, error) {
	resp, err := c.httpPost(ctx, req.Repo, "create-commit-from-patch", req)

	if err != nil {
		return "", err
	}

@ -13,8 +13,11 @@
- filename: enterprise/cmd/frontend/internal/codeintel/resolvers/mocks/mocks_temp.go
  path: github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/codeintel/resolvers
  interfaces:
    - QueryResolver
    - Resolver
- filename: enterprise/cmd/frontend/internal/codeintel/resolvers/mocks/transport/mocks_temp.go
  path: github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/transport/graphql
  interfaces:
    - GitBlobLSIFDataResolver
- filename: enterprise/cmd/frontend/internal/registry/stores/dbmocks/mocks_temp.go
  path: github.com/sourcegraph/sourcegraph/enterprise/cmd/frontend/internal/registry/stores
  interfaces:

@ -143,20 +143,20 @@
  path: github.com/sourcegraph/sourcegraph/internal/codeintel/stores/dbstore/migration
  interfaces:
    - GitServerClient
- filename: internal/codeintel/codenav/mocks_store.go
  path: github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/internal/store
  interfaces:
    - Store
- filename: internal/codeintel/codenav/mocks_lsifstore.go
  path: github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/internal/lsifstore
  interfaces:
    - LsifStore
- filename: internal/codeintel/codenav/mocks_uploadservice.go
  path: github.com/sourcegraph/sourcegraph/internal/codeintel/codenav
  interfaces:
    - UploadService
    - GitTreeTranslator
    - GitserverClient
- filename: internal/codeintel/codenav/mocks_test.go
  sources:
    - path: github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/internal/store
      interfaces:
        - Store
    - path: github.com/sourcegraph/sourcegraph/internal/codeintel/codenav/internal/lsifstore
      interfaces:
        - LsifStore
    - path: github.com/sourcegraph/sourcegraph/internal/codeintel/codenav
      interfaces:
        - UploadService
        - GitTreeTranslator
        - DBStore
        - GitserverClient
- filename: internal/codeintel/uploads/background/cleanup/mocks_test.go
  path: github.com/sourcegraph/sourcegraph/internal/codeintel/uploads/background/cleanup
  interfaces:
