Mirror of https://github.com/sourcegraph/sourcegraph.git (synced 2026-02-06 14:11:44 +00:00)

codeintel: Make codenav results more consistent (#54410)
commit 0889c891db (parent a59fd1e6d8)
@@ -5,14 +5,12 @@
"Config": {
"url": "https://github.com",
"repos": [
"sourcegraph-testing/etcd",
"sourcegraph-testing/tidb",
"sourcegraph-testing/titan",
"sourcegraph-testing/zap",
"sourcegraph-testing/nacelle",
"sourcegraph-testing/nacelle-config",
"sourcegraph-testing/nacelle-service",
"sourcegraph/code-intel-extensions"
"go-nacelle/config",
"go-nacelle/log",
"go-nacelle/nacelle",
"go-nacelle/process",
"go-nacelle/service",
"sourcegraph-testing/nav-test"
]
}
}

@@ -7,11 +7,9 @@ This package provides integration and load testing utilities for precise code in
Ensure that the following tools are available on your path:

- [`src`](https://github.com/sourcegraph/src-cli)
- [`lsif-go`](https://github.com/sourcegraph/lsif-go)
- [`scip-go`](https://github.com/sourcegraph/scip-go)
- [`scip-typescript`](https://github.com/sourcegraph/scip-typescript)

You should have enviornment variables that authenticate you to the `sourcegraph-dev` GCS project if you plan to upload or download index files (as we do in CI).
You should have environment variables that authenticate you to the `sourcegraph-dev` GCS project if you plan to upload or download index files (as we do in CI).

Set:

@@ -26,14 +24,12 @@ SOURCEGRAPH_SUDO_TOKEN=<YOUR SOURCEGRAPH API ACCESS TOKEN>

```
"repos": [
"sourcegraph-testing/etcd",
"sourcegraph-testing/tidb",
"sourcegraph-testing/titan",
"sourcegraph-testing/zap",
"sourcegraph-testing/nacelle",
"sourcegraph-testing/nacelle-config",
"sourcegraph-testing/nacelle-service",
"sourcegraph/code-intel-extensions"
"go-nacelle/config",
"go-nacelle/log",
"go-nacelle/nacelle",
"go-nacelle/process",
"go-nacelle/service",
"sourcegraph-testing/nav-test",
],
```

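As a quick sanity check before running the suite, a minimal Go sketch along these lines can verify that the token described in the README is set. Only the variable name SOURCEGRAPH_SUDO_TOKEN comes from the text above; wiring a check like this into the harness is purely illustrative, not part of the commit.

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// SOURCEGRAPH_SUDO_TOKEN is the access token the README asks you to set.
	// Failing fast here is only an illustrative pattern.
	if os.Getenv("SOURCEGRAPH_SUDO_TOKEN") == "" {
		fmt.Fprintln(os.Stderr, "SOURCEGRAPH_SUDO_TOKEN is not set")
		os.Exit(1)
	}
}
```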
@@ -5,6 +5,7 @@ import (
"fmt"
"os"
"path/filepath"
"strings"

"github.com/sourcegraph/conc/pool"
"github.com/sourcegraph/run"
@@ -24,98 +25,30 @@ func main() {
const (
relativeReposDir = "dev/codeintel-qa/testdata/repos"
relativeIndexesDir = "dev/codeintel-qa/testdata/indexes"
numNavTestRoots = 100
)

var navTestRoots = func() (roots []string) {
for p := 0; p < numNavTestRoots; p++ {
roots = append(roots, fmt.Sprintf("proj%d/", p+1))
}

return roots
}()

var repositoryMeta = []struct {
org string
name string
indexer string
revisions []string
org string
name string
indexer string
revision string
roots []string
}{
// This repository has not been changed from its upstream
{
org: "sourcegraph-testing",
name: "zap",
indexer: "lsif-go",
revisions: []string{
"a6015e13fab9b744d96085308ce4e8f11bad1996",
"2aa9fa25da83bdfff756c36a91442edc9a84576c",
},
},

// Each commit here is tagged as sg-test-1, sg-test-2, and sg-test-3, respectively. See CHANGES.md in the root of the
// repository's master branch to see a history of changes and which revisions were targeted. We specifically use replace
// directives in the project root's go.mod file to target sourcegraph-testing/zap, which has no changes of its own. This
// simulates how common forking works in the Go ecosystem (see our own use of zoekt).
//
// To ensure that the last commit in the list for each repository is visible at tip, the master branch's last commit is
// a merge commit between the true upstream tip and sg-test-3.
{
org: "sourcegraph-testing",
name: "etcd",
indexer: "lsif-go",
revisions: []string{
"4397ceb9c11be0b3e9ee0111230235c868ba581d",
"bc588b7a2e9af4f903396cdcf66f56190b9e254f",
"ad7848014a051dbe3fcd6a4cff2c7befdd16d5a8",
},
},
{
org: "sourcegraph-testing",
name: "tidb",
indexer: "lsif-go",
revisions: []string{
"8eaaa098b4e938b18485f7b1fa7d8e720b04c699",
"b5f100a179e20d5539e629bd0919d05774cb7c6a",
"9aab49176993f9dc0ed2fcb9ef7e5125518e8b98",
},
},
{
org: "sourcegraph-testing",
name: "titan",
indexer: "lsif-go",
revisions: []string{
"fb38de395ba67f49978b218e099de1c45122fb50",
"415ffd5a3ba7a92a07cd96c7d9f4b734f61248f7",
"f8307e394c512b4263fc0cd67ccf9fd46f1ad9a5",
},
},

// These repositories have their module names modified and new tags created to refer to each other
{
org: "sourcegraph-testing",
name: "nacelle",
indexer: "scip-go",
revisions: []string{
"68d3125fb03d4aec540714577401f9f01adffa8a",
},
},
{
org: "sourcegraph-testing",
name: "nacelle-config",
indexer: "scip-go",
revisions: []string{
"4d4864d3b5b046fe12154f3aae7a86a04690c4ae",
},
},
{
org: "sourcegraph-testing",
name: "nacelle-service",
indexer: "scip-go",
revisions: []string{
"0652f3023c1bc7e7466a487f20bbe4b5e28fdcc7",
},
},

// This repository is archived in-practice and as a good candidate for a low-effort scip-typescript test
{
org: "sourcegraph",
name: "code-intel-extensions",
indexer: "scip-typescript",
revisions: []string{
"c66e756d3d68a1e19048c3f7515ba42a7e793767",
},
},
{org: "go-nacelle", name: "config", indexer: "scip-go", revision: "72304c5497e662dcf50af212695d2f232b4d32be", roots: []string{""}},
{org: "go-nacelle", name: "log", indexer: "scip-go", revision: "b380f4731178f82639695e2a69ae6ec2b8b6dbed", roots: []string{""}},
{org: "go-nacelle", name: "nacelle", indexer: "scip-go", revision: "05cf7092f82bddbbe0634fa8ca48067bd219a5b5", roots: []string{""}},
{org: "go-nacelle", name: "process", indexer: "scip-go", revision: "ffadb09a02ca0a8aa6518cf6c118f85ccdc0306c", roots: []string{""}},
{org: "go-nacelle", name: "service", indexer: "scip-go", revision: "ca413da53bba12c23bb73ecf3c7e781664d650e0", roots: []string{""}},
{org: "sourcegraph-testing", name: "nav-test", indexer: "scip-go", revision: "9156747cf1787b8245f366f81145d565f22c6041", roots: navTestRoots},
}

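The comment on the removed etcd/tidb/titan entries above mentions go.mod replace directives that point those forks back at sourcegraph-testing/zap. A minimal, hypothetical sketch of that kind of directive follows; the module paths and version are stand-ins and are not taken from the testdata repositories.

```
module github.com/sourcegraph-testing/example

go 1.20

require go.uber.org/zap v1.24.0

// Redirect the dependency at the fork, as described in the comment above.
replace go.uber.org/zap => github.com/sourcegraph-testing/zap v1.24.0
```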
func mainErr(ctx context.Context) error {
@@ -182,18 +115,24 @@ func indexAll(ctx context.Context) error {
p := pool.New().WithErrors()

for _, meta := range repositoryMeta {
org, name, indexer, revisions := meta.org, meta.name, meta.indexer, meta.revisions
org, name, indexer, revision, roots := meta.org, meta.name, meta.indexer, meta.revision, meta.roots
pair, ok := indexFunMap[indexer]
if !ok {
panic(fmt.Sprintf("unknown language %q", indexer))
}

p.Go(func() error {
for i, revision := range revisions {
revision := revision
targetFile := filepath.Join(indexesDir, fmt.Sprintf("%s.%s.%d.%s.%s", org, name, i, revision, pair.Extension))
for _, root := range roots {
cleanRoot := root
if cleanRoot == "" {
cleanRoot = "/"
}
cleanRoot = strings.ReplaceAll(cleanRoot, "/", "_")

if err := pair.IndexFunc(ctx, reposDir, targetFile, name, revision); err != nil {
revision := revision
targetFile := filepath.Join(indexesDir, fmt.Sprintf("%s.%s.%s.%s.%s", org, name, revision, cleanRoot, pair.Extension))

if err := pair.IndexFunc(ctx, reposDir, targetFile, name, revision, root); err != nil {
return errors.Wrapf(err, "failed to index %s@%s", name, revision)
}
}
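For illustration, here is a minimal Go sketch of the index filename produced by the new naming scheme in the hunk above (an empty root is written as "/", then slashes are replaced with underscores so the root can live inside a filename). The org, name, and root values are taken from or modeled on the repository table above; this is a standalone sketch, not code from the commit.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	org, name, ext := "sourcegraph-testing", "nav-test", "scip"
	revision := "9156747cf1787b8245f366f81145d565f22c6041"

	root := "proj1/"
	cleanRoot := root
	if cleanRoot == "" {
		cleanRoot = "/"
	}
	cleanRoot = strings.ReplaceAll(cleanRoot, "/", "_")

	// Mirrors the fmt.Sprintf format used in indexAll above.
	fmt.Printf("%s.%s.%s.%s.%s\n", org, name, revision, cleanRoot, ext)
	// Output: sourcegraph-testing.nav-test.9156747cf1787b8245f366f81145d565f22c6041.proj1_.scip
}
```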
@@ -207,38 +146,45 @@ func indexAll(ctx context.Context) error {

type IndexerPair struct {
Extension string
IndexFunc func(context.Context, string, string, string, string) error
IndexFunc func(context.Context, string, string, string, string, string) error
}

var indexFunMap = map[string]IndexerPair{
"lsif-go": {"dump", indexGoWithLSIF},
"scip-go": {"scip", indexGoWithSCIP},
"scip-typescript": {"scip", indexTypeScriptWithSCIP},
// "lsif-go": {"dump", indexGoWithLSIF},
"scip-go": {"scip", indexGoWithSCIP},
// "scip-typescript": {"scip", indexTypeScriptWithSCIP},
}

func indexGoWithLSIF(ctx context.Context, reposDir, targetFile, name, revision string) error {
// func indexGoWithLSIF(ctx context.Context, reposDir, targetFile, name, revision, root string) error {
// 	return indexGeneric(ctx, reposDir, targetFile, name, revision, func(repoCopyDir string) error {
// 		if err := run.Bash(ctx, "go", "mod", "tidy").Dir(repoCopyDir).Run().Wait(); err != nil {
// 			return err
// 		}
// 		if err := run.Bash(ctx, "go", "mod", "vendor").Dir(repoCopyDir).Run().Wait(); err != nil {
// 			return err
// 		}
// 		// --repository-root=. is necessary here as the temp dir might be within a strange
// 		// nest of symlinks on MacOS, which confuses the repository root detection in lsif-go.
// 		if err := run.Bash(ctx, "lsif-go", "--repository-root=.", "-o", targetFile).Dir(repoCopyDir).Run().Wait(); err != nil {
// 			return err
// 		}

// 		return nil
// 	})
// }

func indexGoWithSCIP(ctx context.Context, reposDir, targetFile, name, revision, root string) error {
return indexGeneric(ctx, reposDir, targetFile, name, revision, func(repoCopyDir string) error {
if err := run.Bash(ctx, "go", "mod", "tidy").Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
if err := run.Bash(ctx, "go", "mod", "vendor").Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
// --repository-root=. is necessary here as the temp dir might be within a strange
// nest of symlinks on MacOS, which confuses the repository root detection in lsif-go.
if err := run.Bash(ctx, "lsif-go", "--repository-root=.", "-o", targetFile).Dir(repoCopyDir).Run().Wait(); err != nil {
return err
repoRoot := "."
if root != "" {
// If we're applying a root then we look _one back_ for the repository root
// NOTE: we make the assumption that roots are single-directory for integration suite
repoRoot = ".."
}

return nil
})
}

func indexGoWithSCIP(ctx context.Context, reposDir, targetFile, name, revision string) error {
return indexGeneric(ctx, reposDir, targetFile, name, revision, func(repoCopyDir string) error {
// --repository-root=. is necessary here as the temp dir might be within a strange
// nest of symlinks on MacOS, which confuses the repository root detection in scip-go.
if err := run.Bash(ctx, "scip-go", "--repository-root=.", "-o", targetFile).Dir(repoCopyDir).Run().Wait(); err != nil {
if err := run.Bash(ctx, "scip-go", fmt.Sprintf("--repository-root=%s", repoRoot), "-o", targetFile).Dir(filepath.Join(repoCopyDir, root)).Run().Wait(); err != nil {
return err
}

@@ -246,18 +192,18 @@ func indexGoWithSCIP(ctx context.Context, reposDir, targetFile, name, revision s
})
}

func indexTypeScriptWithSCIP(ctx context.Context, reposDir, targetFile, name, revision string) error {
return indexGeneric(ctx, reposDir, targetFile, name, revision, func(repoCopyDir string) error {
if err := run.Bash(ctx, "yarn").Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
if err := run.Bash(ctx, "scip-typescript", "index", "--output", targetFile).Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
// func indexTypeScriptWithSCIP(ctx context.Context, reposDir, targetFile, name, revision, root string) error {
// 	return indexGeneric(ctx, reposDir, targetFile, name, revision, func(repoCopyDir string) error {
// 		if err := run.Bash(ctx, "yarn").Dir(repoCopyDir).Run().Wait(); err != nil {
// 			return err
// 		}
// 		if err := run.Bash(ctx, "scip-typescript", "index", "--output", targetFile).Dir(repoCopyDir).Run().Wait(); err != nil {
// 			return err
// 		}

return nil
})
}
// 		return nil
// 	})
// }

func indexGeneric(ctx context.Context, reposDir, targetFile, name, revision string, index func(repoCopyDir string) error) error {
if ok, err := internal.FileExists(targetFile); err != nil {

@@ -10,6 +10,7 @@ go_library(
"query_types.go",
"state.go",
"test_cases.go",
"test_cases_utils.go",
],
importpath = "github.com/sourcegraph/sourcegraph/dev/codeintel-qa/cmd/query",
visibility = ["//visibility:private"],

@@ -18,24 +18,9 @@ func buildQueries() <-chan queryFunc {
go func() {
defer close(fns)

for _, testCase := range testCases {
// Definition returns definition
fns <- makeTestFunc("def -> def", queryDefinitions, testCase.Definition, []Location{testCase.Definition})

// References return definition
for _, reference := range testCase.References {
fns <- makeTestFunc("refs -> def", queryDefinitions, reference, []Location{testCase.Definition})
}

// Definition returns references (including definition)
fns <- makeTestFunc("def -> refs", queryReferences, testCase.Definition, testCase.References)

// References return references
if queryReferencesOfReferences {
for _, reference := range testCase.References {
references := testCase.References
fns <- makeTestFunc("refs -> refs", queryReferences, reference, references)
}
for _, generator := range testCaseGenerators {
for _, testCase := range generator() {
fns <- testCase
}
}
}()
@@ -58,6 +43,7 @@ func makeTestFunc(name string, f testFunc, source Location, expectedLocations []

if checkQueryResult {
sortLocations(locations)
sortLocations(expectedLocations)

if allowDirtyInstance {
// We allow other upload records to exist on the instance, so we might have

@@ -8,8 +8,9 @@ import (

const preciseIndexesQuery = `
query PreciseIndexes {
preciseIndexes(states: [COMPLETED]) {
preciseIndexes(states: [COMPLETED], first: 1000) {
nodes {
inputRoot
projectRoot {
repository {
name
@@ -23,11 +24,17 @@ const preciseIndexesQuery = `
}
`

func queryPreciseIndexes(ctx context.Context) (_ map[string][]string, err error) {
type CommitAndRoot struct {
Commit string
Root string
}

func queryPreciseIndexes(ctx context.Context) (_ map[string][]CommitAndRoot, err error) {
var payload struct {
Data struct {
PreciseIndexes struct {
Nodes []struct {
InputRoot string `json:"inputRoot"`
ProjectRoot struct {
Repository struct {
Name string `json:"name"`
@@ -44,15 +51,16 @@ func queryPreciseIndexes(ctx context.Context) (_ map[string][]string, err error)
return nil, err
}

commitsByRepo := map[string][]string{}
rootsByCommitsByRepo := map[string][]CommitAndRoot{}
for _, node := range payload.Data.PreciseIndexes.Nodes {
root := node.InputRoot
projectRoot := node.ProjectRoot
name := projectRoot.Repository.Name
commit := projectRoot.Commit.OID
commitsByRepo[name] = append(commitsByRepo[name], commit)
rootsByCommitsByRepo[name] = append(rootsByCommitsByRepo[name], CommitAndRoot{commit, root})
}

return commitsByRepo, nil
return rootsByCommitsByRepo, nil
}

const definitionsQuery = `
@@ -99,7 +107,6 @@ pageInfo {
}
`

// queryDefinitions returns all of the LSIF definitions for the given location.
func queryDefinitions(ctx context.Context, location Location) (locations []Location, err error) {
variables := map[string]any{
"repository": location.Repo,
@@ -143,7 +150,6 @@ const referencesQuery = `
}
`

// queryReferences returns all of the LSIF references for the given location.
func queryReferences(ctx context.Context, location Location) (locations []Location, err error) {
endCursor := ""
for {
@@ -181,6 +187,112 @@ func queryReferences(ctx context.Context, location Location) (locations []Locati
return locations, nil
}

const implementationsQuery = `
query Implementations($repository: String!, $commit: String!, $path: String!, $line: Int!, $character: Int!, $after: String) {
repository(name: $repository) {
commit(rev: $commit) {
blob(path: $path) {
lsif {
implementations(line: $line, character: $character, after: $after) {
` + locationsFragment + `
}
}
}
}
}
}
`

func queryImplementations(ctx context.Context, location Location) (locations []Location, err error) {
endCursor := ""
for {
variables := map[string]any{
"repository": location.Repo,
"commit": location.Rev,
"path": location.Path,
"line": location.Line,
"character": location.Character,
}
if endCursor != "" {
variables["after"] = endCursor
}

var payload QueryResponse
if err := queryGraphQL(ctx, "CodeIntelQA_Query_Implementations", implementationsQuery, variables, &payload); err != nil {
return nil, err
}

for _, node := range payload.Data.Repository.Commit.Blob.LSIF.Implementations.Nodes {
locations = append(locations, Location{
Repo: node.Resource.Repository.Name,
Rev: node.Resource.Commit.Oid,
Path: node.Resource.Path,
Line: node.Range.Start.Line,
Character: node.Range.Start.Character,
})
}

if endCursor = payload.Data.Repository.Commit.Blob.LSIF.Implementations.PageInfo.EndCursor; endCursor == "" {
break
}
}

return locations, nil
}

const prototypesQuery = `
query Prototypes($repository: String!, $commit: String!, $path: String!, $line: Int!, $character: Int!, $after: String) {
repository(name: $repository) {
commit(rev: $commit) {
blob(path: $path) {
lsif {
prototypes(line: $line, character: $character, after: $after) {
` + locationsFragment + `
}
}
}
}
}
}
`

func queryPrototypes(ctx context.Context, location Location) (locations []Location, err error) {
endCursor := ""
for {
variables := map[string]any{
"repository": location.Repo,
"commit": location.Rev,
"path": location.Path,
"line": location.Line,
"character": location.Character,
}
if endCursor != "" {
variables["after"] = endCursor
}

var payload QueryResponse
if err := queryGraphQL(ctx, "CodeIntelQA_Query_Prototypes", prototypesQuery, variables, &payload); err != nil {
return nil, err
}

for _, node := range payload.Data.Repository.Commit.Blob.LSIF.Prototypes.Nodes {
locations = append(locations, Location{
Repo: node.Resource.Repository.Name,
Rev: node.Resource.Commit.Oid,
Path: node.Resource.Path,
Line: node.Range.Start.Line,
Character: node.Range.Start.Character,
})
}

if endCursor = payload.Data.Repository.Commit.Blob.LSIF.Prototypes.PageInfo.EndCursor; endCursor == "" {
break
}
}

return locations, nil
}

// sortLocations sorts a slice of Locations by repo, rev, path, line, then character.
func sortLocations(locations []Location) {
sort.Slice(locations, func(i, j int) bool {

@@ -6,8 +6,10 @@ type QueryResponse struct {
Commit struct {
Blob struct {
LSIF struct {
Definitions Definitions `json:"definitions"`
References References `json:"references"`
Definitions Definitions `json:"definitions"`
References References `json:"references"`
Implementations Implementations `json:"implementations"`
Prototypes Prototypes `json:"prototypes"`
} `json:"lsif"`
} `json:"blob"`
} `json:"commit"`
@@ -24,6 +26,16 @@ type References struct {
PageInfo PageInfo `json:"pageInfo"`
}

type Implementations struct {
Nodes []Node `json:"nodes"`
PageInfo PageInfo `json:"pageInfo"`
}

type Prototypes struct {
Nodes []Node `json:"nodes"`
PageInfo PageInfo `json:"pageInfo"`
}

type Node struct {
Resource `json:"resource"`
Range `json:"range"`

@@ -3,6 +3,7 @@ package main
import (
"context"
"sort"
"strings"

"github.com/google/go-cmp/cmp"

@@ -25,24 +26,33 @@ func instanceStateDiff(ctx context.Context) (string, error) {
if err != nil {
return "", err
}
expectedCommitsByRepo := map[string][]string{}
expectedCommitAndRootsByRepo := map[string][]CommitAndRoot{}
for repoName, extensionAndCommits := range extensionAndCommitsByRepo {
commits := make([]string, 0, len(extensionAndCommits))
commitAndRoots := make([]CommitAndRoot, 0, len(extensionAndCommits))
for _, e := range extensionAndCommits {
commits = append(commits, e.Commit)
root := strings.ReplaceAll(e.Root, "_", "/")
if root == "/" {
root = ""
}

commitAndRoots = append(commitAndRoots, CommitAndRoot{e.Commit, root})
}

sort.Strings(commits)
expectedCommitsByRepo[internal.MakeTestRepoName(repoName)] = commits
expectedCommitAndRootsByRepo[internal.MakeTestRepoName(repoName)] = commitAndRoots
}

uploadedCommitsByRepo, err := queryPreciseIndexes(ctx)
uploadedCommitAndRootsByRepo, err := queryPreciseIndexes(ctx)
if err != nil {
return "", err
}
for _, commits := range uploadedCommitsByRepo {
sort.Strings(commits)

for _, commitAndRoots := range uploadedCommitAndRootsByRepo {
sortCommitAndRoots(commitAndRoots)
}
for _, commitAndRoots := range expectedCommitAndRootsByRepo {
sortCommitAndRoots(commitAndRoots)
}

if allowDirtyInstance {
// We allow other upload records to exist on the instance, but we still
// need to ensure that the set of uploads we require for the tests remain
@@ -50,21 +60,38 @@ func instanceStateDiff(ctx context.Context) (string, error) {
// commits that don't exist in our expected list, and check only that we
// have a superset of our expected state.

for repoName, commits := range uploadedCommitsByRepo {
if expectedCommits, ok := expectedCommitsByRepo[repoName]; !ok {
delete(uploadedCommitsByRepo, repoName)
for repoName, commitAndRoots := range uploadedCommitAndRootsByRepo {
if expectedCommits, ok := expectedCommitAndRootsByRepo[repoName]; !ok {
delete(uploadedCommitAndRootsByRepo, repoName)
} else {
filtered := commits[:0]
for _, commit := range commits {
if i := sort.SearchStrings(expectedCommits, commit); i < len(expectedCommits) && expectedCommits[i] == commit {
filtered = append(filtered, commit)
filtered := commitAndRoots[:0]
for _, commitAndRoot := range commitAndRoots {
found := false
for _, ex := range expectedCommits {
if ex.Commit == commitAndRoot.Commit && ex.Root == commitAndRoot.Root {
found = true
break
}
}
if !found {
filtered = append(filtered, commitAndRoot)
}

uploadedCommitsByRepo[repoName] = filtered
}

uploadedCommitAndRootsByRepo[repoName] = filtered
}
}
}

return cmp.Diff(expectedCommitsByRepo, uploadedCommitsByRepo), nil
return cmp.Diff(expectedCommitAndRootsByRepo, uploadedCommitAndRootsByRepo), nil
}

func sortCommitAndRoots(commitAndRoots []CommitAndRoot) {
sort.Slice(commitAndRoots, func(i, j int) bool {
if commitAndRoots[i].Commit != commitAndRoots[j].Commit {
return commitAndRoots[i].Commit < commitAndRoots[j].Commit
}

return commitAndRoots[i].Root < commitAndRoots[j].Root
})
}

File diff suppressed because it is too large.

dev/codeintel-qa/cmd/query/test_cases_utils.go (new file, 110 lines)
@@ -0,0 +1,110 @@
package main

import "fmt"

// Location specifies the first position in a source range.
type Location struct {
Repo string
Rev string
Path string
Line int
Character int
}

type TaggedLocation struct {
Location Location
IgnoreSiblingRelationships bool
}

const maxRefToDefAssertionsPerFile = 10

// generate tests that asserts definition <> reference relationships on a particular set of
// locations all referring to the same SCIP symbol
func makeDefsRefsTests(symbolName string, defs []Location, refs []TaggedLocation) (fns []queryFunc) {
var untagagedRefs []Location
for _, taggedLocation := range refs {
untagagedRefs = append(untagagedRefs, taggedLocation.Location)
}

for _, def := range defs {
fns = append(fns,
makeDefsTest(symbolName, "definition", def, defs), // "you are at definition"
makeRefsTest(symbolName, "definition", def, untagagedRefs), // def -> refs
)
}

sourceFiles := map[string]int{}

for _, ref := range refs {
if ref.IgnoreSiblingRelationships {
continue
}

sourceFiles[ref.Location.Path] = sourceFiles[ref.Location.Path] + 1
if sourceFiles[ref.Location.Path] >= maxRefToDefAssertionsPerFile {
continue
}

// ref -> def
fns = append(fns, makeDefsTest(symbolName, "reference", ref.Location, defs))

if queryReferencesOfReferences {
// global search for other refs
fns = append(fns, makeRefsTest(symbolName, "reference", ref.Location, untagagedRefs))
}
}

return fns
}

// generate tests that asserts prototype <> implementation relationships on a particular set of
// locations all referring to the same SCIP symbol
func makeProtoImplsTests(symbolName string, prototype Location, implementations []Location) (fns []queryFunc) {
fns = append(fns,
// N.B.: unlike defs/refs tests, prototypes don't "implement" themselves so we do not
// assert that prototypes of a prototype is an identity function (unlike def -> def).
makeImplsTest(symbolName, "prototype", prototype, implementations),
)

for _, implementation := range implementations {
fns = append(fns,
// N.B.: unlike defs/refs tests, sibling implementations do not "implement" each other
// so we do not assert implementations can jump to siblings without first going to the
// prototype.
makeProtosTest(symbolName, "implementation", implementation, []Location{prototype}),
)
}

return fns
}

// generate tests that asserts the definitions at the given source location
func makeDefsTest(symbolName, target string, source Location, expectedResults []Location) queryFunc {
return makeTestFunc(fmt.Sprintf("definitions of %s from %s", symbolName, target), queryDefinitions, source, expectedResults)
}

// generate tests that asserts the references at the given source location
func makeRefsTest(symbolName, target string, source Location, expectedResults []Location) queryFunc {
return makeTestFunc(fmt.Sprintf("references of %s from %s", symbolName, target), queryReferences, source, expectedResults)
}

// generate tests that asserts the prototypes at the given source location
func makeProtosTest(symbolName, target string, source Location, expectedResults []Location) queryFunc {
return makeTestFunc(fmt.Sprintf("prototypes of %s from %s", symbolName, target), queryPrototypes, source, expectedResults)
}

// generate tests that asserts the implementations at the given source location
func makeImplsTest(symbolName, target string, source Location, expectedResults []Location) queryFunc {
return makeTestFunc(fmt.Sprintf("implementations of %s from %s", symbolName, target), queryImplementations, source, expectedResults)
}

func l(repo, rev, path string, line, character int) Location {
return Location{Repo: repo, Rev: rev, Path: path, Line: line, Character: character}
}

func t(repo, rev, path string, line, character int, embedsAnonymousInterface bool) TaggedLocation {
return TaggedLocation{
Location: l(repo, rev, path, line, character),
IgnoreSiblingRelationships: embedsAnonymousInterface,
}
}

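For orientation, a hedged sketch of how the helpers above might be combined into one of the generators consumed by buildQueries. Everything here, from the symbol name to the file paths and positions, is made up; only the helper signatures and the repository/revision come from the diff, and the snippet assumes it lives in the same package as the file above.

```go
// Illustrative only: relies on the queryFunc type and the l/t helpers defined above.
func exampleTestCases() []queryFunc {
	const repo, rev = "sourcegraph-testing/nav-test", "9156747cf1787b8245f366f81145d565f22c6041"

	return makeDefsRefsTests(
		"ExampleSymbol", // hypothetical SCIP symbol name
		[]Location{l(repo, rev, "proj1/main.go", 4, 5)},
		[]TaggedLocation{
			t(repo, rev, "proj2/main.go", 10, 2, false),
			t(repo, rev, "proj3/main.go", 12, 2, true), // embeds an anonymous interface; skip sibling assertions
		},
	)
}
```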
@@ -76,7 +76,7 @@ func monitor(ctx context.Context, repoNames []string, uploads []uploadMeta) erro
}

if oldState != "COMPLETED" {
fmt.Printf("[%5s] %s Finished processing index %s for %s@%s\n", internal.TimeSince(start), internal.EmojiSuccess, uploadState.upload.id, repoName, uploadState.upload.commit[:7])
fmt.Printf("[%5s] %s Finished processing index %s for %s@%s:%s\n", internal.TimeSince(start), internal.EmojiSuccess, uploadState.upload.id, repoName, uploadState.upload.commit[:7], uploadState.upload.root)
}
} else if uploadState.state != "QUEUED_FOR_PROCESSING" && uploadState.state != "PROCESSING" {
var payload struct {
@@ -91,7 +91,7 @@ func monitor(ctx context.Context, repoNames []string, uploads []uploadMeta) erro
}

if err := internal.GraphQLClient().GraphQL(internal.SourcegraphAccessToken, preciseIndexesQueryFragment, nil, &payload); err != nil {
return errors.Newf("unexpected state '%s' for %s@%s - ID %s\nAudit Logs:\n%s", uploadState.state, uploadState.upload.repoName, uploadState.upload.commit[:7], &uploadState.upload.id, errors.Wrap(err, "error getting audit logs"))
return errors.Newf("unexpected state '%s' for %s@%s:%s - ID %s\nAudit Logs:\n%s", uploadState.state, uploadState.upload.repoName, uploadState.upload.commit[:7], uploadState.upload.root, &uploadState.upload.id, errors.Wrap(err, "error getting audit logs"))
}

var dst bytes.Buffer
@@ -126,7 +126,7 @@ func monitor(ctx context.Context, repoNames []string, uploads []uploadMeta) erro
fmt.Printf("DUMP:\n\n%s\n\n\n", out)
}

return errors.Newf("unexpected state '%s' for %s (%s@%s)\nAudit Logs:\n%s", uploadState.state, uploadState.upload.id, uploadState.upload.repoName, uploadState.upload.commit[:7], logs)
return errors.Newf("unexpected state '%s' for %s (%s@%s:%s)\nAudit Logs:\n%s", uploadState.state, uploadState.upload.id, uploadState.upload.repoName, uploadState.upload.commit[:7], uploadState.upload.root, logs)
}
}

@@ -19,13 +19,14 @@ type uploadMeta struct {
id string
repoName string
commit string
root string
}

// uploadAll uploads the dumps for the commits present in the given commitsByRepo map.
// Uploads are performed concurrently given the limiter instance as well as the set of
// flags supplied by the user. This function returns a slice of uploadMeta containing
// the graphql identifier of the uploaded resources.
func uploadAll(ctx context.Context, extensionAndCommitsByRepo map[string][]internal.ExtensionAndCommit, limiter *internal.Limiter) ([]uploadMeta, error) {
func uploadAll(ctx context.Context, extensionAndCommitsByRepo map[string][]internal.ExtensionCommitAndRoot, limiter *internal.Limiter) ([]uploadMeta, error) {
n := 0
for _, commits := range extensionAndCommitsByRepo {
n += len(commits)
@@ -36,9 +37,10 @@ func uploadAll(ctx context.Context, extensionAndCommitsByRepo map[string][]inter
uploadCh := make(chan uploadMeta, n)

for repoName, extensionAndCommits := range extensionAndCommitsByRepo {
for i, extensionAndCommit := range extensionAndCommits {
commit := extensionAndCommit.Commit
extension := extensionAndCommit.Extension
for _, extensionCommitAndRoot := range extensionAndCommits {
commit := extensionCommitAndRoot.Commit
extension := extensionCommitAndRoot.Extension
root := extensionCommitAndRoot.Root

wg.Add(1)

@@ -51,22 +53,24 @@ func uploadAll(ctx context.Context, extensionAndCommitsByRepo map[string][]inter
}
defer limiter.Release()

fmt.Printf("[%5s] %s Uploading index for %s@%s\n", internal.TimeSince(start), internal.EmojiLightbulb, repoName, commit[:7])
fmt.Printf("[%5s] %s Uploading index for %s@%s:%s\n", internal.TimeSince(start), internal.EmojiLightbulb, repoName, commit[:7], root)

graphqlID, err := upload(ctx, internal.MakeTestRepoName(repoName), commit, file)
cleanedRoot := strings.ReplaceAll(root, "_", "/")
graphqlID, err := upload(ctx, internal.MakeTestRepoName(repoName), commit, file, cleanedRoot)
if err != nil {
errCh <- err
return
}

fmt.Printf("[%5s] %s Finished uploading index %s for %s@%s\n", internal.TimeSince(start), internal.EmojiSuccess, graphqlID, repoName, commit[:7])
fmt.Printf("[%5s] %s Finished uploading index %s for %s@%s:%s\n", internal.TimeSince(start), internal.EmojiSuccess, graphqlID, repoName, commit[:7], cleanedRoot)

uploadCh <- uploadMeta{
id: graphqlID,
repoName: repoName,
commit: commit,
root: cleanedRoot,
}
}(repoName, commit, fmt.Sprintf("%s.%d.%s.%s", strings.Replace(repoName, "/", ".", 1), i, commit, extension))
}(repoName, commit, fmt.Sprintf("%s.%s.%s.%s", strings.Replace(repoName, "/", ".", 1), commit, root, extension))
}
}

@@ -90,9 +94,9 @@ func uploadAll(ctx context.Context, extensionAndCommitsByRepo map[string][]inter

// upload invokes `src code-intel upload` on the host and returns the graphql identifier of
// the uploaded resource.
func upload(ctx context.Context, repoName, commit, file string) (string, error) {
func upload(ctx context.Context, repoName, commit, file, root string) (string, error) {
argMap := map[string]string{
"root": "/",
"root": root,
"repo": repoName,
"commit": commit,
"file": file,
@@ -135,7 +139,7 @@ func upload(ctx context.Context, repoName, commit, file string) (string, error)

output, err := cmd.CombinedOutput()
if err != nil {
return "", errors.Wrap(err, fmt.Sprintf("failed to upload index for %s@%s: %s", repoName, commit, output))
return "", errors.Wrap(err, fmt.Sprintf("failed to upload index for %s@%s:%s: %s", repoName, commit, root, output))
}

resp := struct {

@@ -7,28 +7,30 @@ import (
"github.com/grafana/regexp"
)

var indexFilenamePattern = regexp.MustCompile(`^([^.]+)\.([^.]+)\.\d+\.([0-9A-Fa-f]{40})\.(scip|dump)$`)
var indexFilenamePattern = regexp.MustCompile(`^([^.]+)\.([^.]+)\.([0-9A-Fa-f]{40})\.([^.]+)\.(scip|dump)$`)

type ExtensionAndCommit struct {
type ExtensionCommitAndRoot struct {
Extension string
Commit string
Root string
}

// ExtensionAndCommitsByRepo returns a map from org+repository name to a slice of commit and extension
// pairs for that repository. The repositories and commits are read from the filesystem state of the
// index directory supplied by the user. This method assumes that index files have been downloaded or
// generated locally.
func ExtensionAndCommitsByRepo(indexDir string) (map[string][]ExtensionAndCommit, error) {
func ExtensionAndCommitsByRepo(indexDir string) (map[string][]ExtensionCommitAndRoot, error) {
infos, err := os.ReadDir(indexDir)
if err != nil {
return nil, err
}

commitsByRepo := map[string][]ExtensionAndCommit{}
commitsByRepo := map[string][]ExtensionCommitAndRoot{}
for _, info := range infos {
if matches := indexFilenamePattern.FindStringSubmatch(info.Name()); len(matches) > 0 {
orgRepo := fmt.Sprintf("%s/%s", matches[1], matches[2])
commitsByRepo[orgRepo] = append(commitsByRepo[orgRepo], ExtensionAndCommit{Extension: matches[4], Commit: matches[3]})
root := matches[4]
commitsByRepo[orgRepo] = append(commitsByRepo[orgRepo], ExtensionCommitAndRoot{Extension: matches[5], Commit: matches[3], Root: root})
}
}

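To make the capture-group shuffle above concrete, here is a small standalone Go sketch that applies the new indexFilenamePattern to a hypothetical index file name (the file name is an assumed example following the org.name.commit.root.extension scheme; the real code uses github.com/grafana/regexp, a drop-in replacement for the standard library package used here).

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the new indexFilenamePattern above.
var indexFilenamePattern = regexp.MustCompile(`^([^.]+)\.([^.]+)\.([0-9A-Fa-f]{40})\.([^.]+)\.(scip|dump)$`)

func main() {
	// Hypothetical index file name produced by the indexer command.
	name := "sourcegraph-testing.nav-test.9156747cf1787b8245f366f81145d565f22c6041.proj1_.scip"

	matches := indexFilenamePattern.FindStringSubmatch(name)
	// matches[1]=org, [2]=repo, [3]=commit, [4]=root (slashes encoded as "_"), [5]=extension
	fmt.Println(matches[1], matches[2], matches[3], matches[4], matches[5])
	// Output: sourcegraph-testing nav-test 9156747cf1787b8245f366f81145d565f22c6041 proj1_ scip
}
```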
@@ -302,7 +302,7 @@ func extractOccurrenceData(document *scip.Document, occurrence *scip.Occurrence)
for _, rel := range sym.Relationships {
if rel.IsImplementation {
if rel.Symbol == occurrence.Symbol {
implementationsBySymbol[occurrence.Symbol] = struct{}{}
implementationsBySymbol[sym.Symbol] = struct{}{}
}
}
}
@@ -378,7 +378,7 @@ func (s *store) ExtractReferenceLocationsFromPosition(ctx context.Context, locat
}

func (s *store) ExtractImplementationLocationsFromPosition(ctx context.Context, locationKey LocationKey) (_ []shared.Location, _ []string, err error) {
return s.extractLocationsFromPosition(ctx, extractImplementationRanges, symbolExtractDefault, s.operations.getImplementationLocations, locationKey)
return s.extractLocationsFromPosition(ctx, extractImplementationRanges, symbolExtractImplementations, s.operations.getImplementationLocations, locationKey)
}

func (s *store) ExtractPrototypeLocationsFromPosition(ctx context.Context, locationKey LocationKey) (_ []shared.Location, _ []string, err error) {
@@ -397,6 +397,20 @@ func symbolExtractDefault(document *scip.Document, symbolName string) (symbols [
return append(symbols, symbolName)
}

func symbolExtractImplementations(document *scip.Document, symbolName string) (symbols []string) {
for _, sym := range document.Symbols {
for _, rel := range sym.Relationships {
if rel.IsImplementation {
if rel.Symbol == symbolName {
symbols = append(symbols, sym.Symbol)
}
}
}
}

return append(symbols, symbolName)
}

func symbolExtractPrototype(document *scip.Document, symbolName string) (symbols []string) {
if symbol := scip.FindSymbol(document, symbolName); symbol != nil {
for _, rel := range symbol.Relationships {

@@ -574,28 +574,19 @@ func TestExtractOccurrenceData(t *testing.T) {
},
},
Symbols: []*scip.SymbolInformation{
{
Symbol: "react 17.1 main.go func1",
Relationships: []*scip.Relationship{
{
Symbol: "react 17.1 main.go func1A",
IsImplementation: true,
},
},
},
{
Symbol: "react 17.1 main.go func1A",
Relationships: []*scip.Relationship{
{
Symbol: "react 17.1 main.go func1",
IsDefinition: true,
Symbol: "react 17.1 main.go func1",
IsImplementation: true,
},
},
},
},
},
occurrence: &scip.Occurrence{
Symbol: "react 17.1 main.go func1A",
Symbol: "react 17.1 main.go func1",
SymbolRoles: 1,
},
expectedRanges: []*scip.Range{

@@ -578,9 +578,16 @@ func (s *Service) prepareCandidateUploads(
if err != nil {
return Cursor{}, false, err
}
var ids []int
idMap := make(map[int]struct{}, len(uploads)+len(cursor.VisibleUploads))
for _, upload := range cursor.VisibleUploads {
idMap[upload.DumpID] = struct{}{}
}
for _, upload := range uploads {
ids = append(ids, upload.ID)
idMap[upload.ID] = struct{}{}
}
ids := make([]int, 0, len(idMap))
for id := range idMap {
ids = append(ids, id)
}
sort.Ints(ids)

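The hunk above replaces a plain append loop with a map-based union of the cursor's visible uploads and the newly resolved uploads, followed by a sort so the ID list is deterministic. A standalone sketch of that pattern, with arbitrary input values:

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	visible := []int{52, 50}      // e.g. dump IDs already tracked on the cursor
	fetched := []int{151, 50, 53} // e.g. IDs of newly fetched uploads

	// Union via a set, mirroring the idMap built in prepareCandidateUploads.
	idMap := make(map[int]struct{}, len(visible)+len(fetched))
	for _, id := range visible {
		idMap[id] = struct{}{}
	}
	for _, id := range fetched {
		idMap[id] = struct{}{}
	}

	ids := make([]int, 0, len(idMap))
	for id := range idMap {
		ids = append(ids, id)
	}
	sort.Ints(ids) // deterministic order regardless of map iteration

	fmt.Println(ids) // [50 52 53 151]
}
```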
@@ -186,7 +186,7 @@ func TestNewGetDefinitions(t *testing.T) {
if history := mockLsifStore.GetMinimalBulkMonikerLocationsFunc.History(); len(history) != 1 {
t.Fatalf("unexpected call count for lsifstore.BulkMonikerResults. want=%d have=%d", 1, len(history))
} else {
if diff := cmp.Diff([]int{151, 152, 153}, history[0].Arg2); diff != "" {
if diff := cmp.Diff([]int{50, 51, 52, 53, 151, 152, 153}, history[0].Arg2); diff != "" {
t.Errorf("unexpected ids (-want +got):\n%s", diff)
}

@@ -417,7 +417,7 @@ func TestNewGetReferences(t *testing.T) {
if history := mockLsifStore.GetMinimalBulkMonikerLocationsFunc.History(); len(history) != 3 {
t.Fatalf("unexpected call count for lsifstore.BulkMonikerResults. want=%d have=%d", 3, len(history))
} else {
if diff := cmp.Diff([]int{151, 152, 153}, history[0].Arg2); diff != "" {
if diff := cmp.Diff([]int{50, 51, 52, 53, 151, 152, 153}, history[0].Arg2); diff != "" {
t.Errorf("unexpected ids (-want +got):\n%s", diff)
}

@@ -10,10 +10,10 @@ import (

type CodeNavService interface {
GetHover(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState) (_ string, _ shared.Range, _ bool, err error)
GetReferences(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState, cursor codenav.ReferencesCursor) (_ []shared.UploadLocation, nextCursor codenav.ReferencesCursor, err error)
GetImplementations(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState, cursor codenav.ImplementationsCursor) (_ []shared.UploadLocation, nextCursor codenav.ImplementationsCursor, err error)
GetPrototypes(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState, cursor codenav.ImplementationsCursor) (_ []shared.UploadLocation, nextCursor codenav.ImplementationsCursor, err error)
GetDefinitions(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState) (_ []shared.UploadLocation, err error)
NewGetReferences(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState, cursor codenav.Cursor) (_ []shared.UploadLocation, nextCursor codenav.Cursor, err error)
NewGetImplementations(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState, cursor codenav.Cursor) (_ []shared.UploadLocation, nextCursor codenav.Cursor, err error)
NewGetPrototypes(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState, cursor codenav.Cursor) (_ []shared.UploadLocation, nextCursor codenav.Cursor, err error)
NewGetDefinitions(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState) (_ []shared.UploadLocation, err error)
GetDiagnostics(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState) (diagnosticsAtUploads []codenav.DiagnosticAtUpload, _ int, err error)
GetRanges(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState, startLine, endLine int) (adjustedRanges []codenav.AdjustedCodeIntelligenceRange, err error)
GetStencil(ctx context.Context, args codenav.PositionalRequestArgs, requestState codenav.RequestState) (adjustedRanges []shared.Range, err error)

internal/codeintel/codenav/transport/graphql/mocks_test.go (generated, 1066 lines): file diff suppressed because it is too large.
@@ -13,12 +13,15 @@ import (
"github.com/sourcegraph/sourcegraph/lib/errors"
)

const DefaultDefinitionsPageSize = 100

// Definitions returns the list of source locations that define the symbol at the given position.
func (r *gitBlobLSIFDataResolver) Definitions(ctx context.Context, args *resolverstubs.LSIFQueryPositionArgs) (_ resolverstubs.LocationConnectionResolver, err error) {
requestArgs := codenav.PositionalRequestArgs{
RequestArgs: codenav.RequestArgs{
RepositoryID: r.requestState.RepositoryID,
Commit: r.requestState.Commit,
Limit: DefaultDefinitionsPageSize,
},
Path: r.requestState.Path,
Line: int(args.Line),
@@ -34,7 +37,7 @@ func (r *gitBlobLSIFDataResolver) Definitions(ctx context.Context, args *resolve
}})
defer endObservation()

def, err := r.codeNavSvc.GetDefinitions(ctx, requestArgs, r.requestState)
def, err := r.codeNavSvc.NewGetDefinitions(ctx, requestArgs, r.requestState)
if err != nil {
return nil, errors.Wrap(err, "codeNavSvc.GetDefinitions")
}

@@ -50,18 +50,18 @@ func (r *gitBlobLSIFDataResolver) Implementations(ctx context.Context, args *res
// is used to resolve each page. This cursor will be modified in-place to become the
// cursor used to fetch the subsequent page of results in this result set.
var nextCursor string
cursor, err := decodeImplementationsCursor(rawCursor)
cursor, err := decodeTraversalCursor(rawCursor)
if err != nil {
return nil, errors.Wrap(err, fmt.Sprintf("invalid cursor: %q", rawCursor))
}

impls, implsCursor, err := r.codeNavSvc.GetImplementations(ctx, requestArgs, r.requestState, cursor)
impls, implsCursor, err := r.codeNavSvc.NewGetImplementations(ctx, requestArgs, r.requestState, cursor)
if err != nil {
return nil, errors.Wrap(err, "codeNavSvc.GetImplementations")
}

if implsCursor.Phase != "done" {
nextCursor = encodeImplementationsCursor(implsCursor)
nextCursor = encodeTraversalCursor(implsCursor)
}

if args.Filter != nil && *args.Filter != "" {
@@ -107,18 +107,18 @@ func (r *gitBlobLSIFDataResolver) Prototypes(ctx context.Context, args *resolver
// is used to resolve each page. This cursor will be modified in-place to become the
// cursor used to fetch the subsequent page of results in this result set.
var nextCursor string
cursor, err := decodeImplementationsCursor(rawCursor)
cursor, err := decodeTraversalCursor(rawCursor)
if err != nil {
return nil, errors.Wrap(err, fmt.Sprintf("invalid cursor: %q", rawCursor))
}

prototypes, protoCursor, err := r.codeNavSvc.GetPrototypes(ctx, requestArgs, r.requestState, cursor)
prototypes, protoCursor, err := r.codeNavSvc.NewGetPrototypes(ctx, requestArgs, r.requestState, cursor)
if err != nil {
return nil, errors.Wrap(err, "codeNavSvc.GetPrototypes")
}

if protoCursor.Phase != "done" {
nextCursor = encodeImplementationsCursor(protoCursor)
nextCursor = encodeTraversalCursor(protoCursor)
}

if args.Filter != nil && *args.Filter != "" {

@@ -47,18 +47,18 @@ func (r *gitBlobLSIFDataResolver) References(ctx context.Context, args *resolver
// is used to resolve each page. This cursor will be modified in-place to become the
// cursor used to fetch the subsequent page of results in this result set.
var nextCursor string
cursor, err := decodeReferencesCursor(requestArgs.RawCursor)
cursor, err := decodeTraversalCursor(requestArgs.RawCursor)
if err != nil {
return nil, errors.Wrap(err, fmt.Sprintf("invalid cursor: %q", rawCursor))
}

refs, refCursor, err := r.codeNavSvc.GetReferences(ctx, requestArgs, r.requestState, cursor)
refs, refCursor, err := r.codeNavSvc.NewGetReferences(ctx, requestArgs, r.requestState, cursor)
if err != nil {
return nil, errors.Wrap(err, "svc.GetReferences")
}

if refCursor.Phase != "done" {
nextCursor = encodeReferencesCursor(refCursor)
nextCursor = encodeTraversalCursor(refCursor)
}

if args.Filter != nil && *args.Filter != "" {
@@ -77,6 +77,29 @@ func (r *gitBlobLSIFDataResolver) References(ctx context.Context, args *resolver
//
//

func decodeTraversalCursor(rawEncoded string) (codenav.Cursor, error) {
if rawEncoded == "" {
return codenav.Cursor{}, nil
}

raw, err := base64.RawURLEncoding.DecodeString(rawEncoded)
if err != nil {
return codenav.Cursor{}, err
}

var cursor codenav.Cursor
err = json.Unmarshal(raw, &cursor)
return cursor, err
}

func encodeTraversalCursor(cursor codenav.Cursor) string {
rawEncoded, _ := json.Marshal(cursor)
return base64.RawURLEncoding.EncodeToString(rawEncoded)
}

//
//

// decodeReferencesCursor is the inverse of encodeCursor. If the given encoded string is empty, then
// a fresh cursor is returned.
func decodeReferencesCursor(rawEncoded string) (codenav.ReferencesCursor, error) {

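decodeTraversalCursor and encodeTraversalCursor above are plain base64(JSON) codecs over codenav.Cursor. The following self-contained sketch shows the same round-trip; the Cursor struct here is a stand-in for codenav.Cursor (the real type has more fields and its own JSON tags), so only the encoding mechanics are representative.

```go
package main

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
)

// Stand-in for codenav.Cursor.
type Cursor struct {
	Phase string `json:"phase"`
}

func encodeTraversalCursor(cursor Cursor) string {
	raw, _ := json.Marshal(cursor)
	return base64.RawURLEncoding.EncodeToString(raw)
}

func decodeTraversalCursor(rawEncoded string) (Cursor, error) {
	if rawEncoded == "" {
		return Cursor{}, nil // empty string means "fresh cursor"
	}
	raw, err := base64.RawURLEncoding.DecodeString(rawEncoded)
	if err != nil {
		return Cursor{}, err
	}
	var cursor Cursor
	err = json.Unmarshal(raw, &cursor)
	return cursor, err
}

func main() {
	encoded := encodeTraversalCursor(Cursor{Phase: "local"})
	decoded, err := decodeTraversalCursor(encoded)
	fmt.Println(encoded, decoded.Phase, err) // eyJwaGFzZSI6ImxvY2FsIn0 local <nil>
}
```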
@@ -81,13 +81,13 @@ func TestDefinitions(t *testing.T) {
t.Fatalf("unexpected error: %s", err)
}

if len(mockCodeNavService.GetDefinitionsFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockCodeNavService.GetDefinitionsFunc.History()))
if len(mockCodeNavService.NewGetDefinitionsFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockCodeNavService.NewGetDefinitionsFunc.History()))
}
if val := mockCodeNavService.GetDefinitionsFunc.History()[0].Arg1; val.Line != 10 {
if val := mockCodeNavService.NewGetDefinitionsFunc.History()[0].Arg1; val.Line != 10 {
t.Fatalf("unexpected line. want=%v have=%v", 10, val)
}
if val := mockCodeNavService.GetDefinitionsFunc.History()[0].Arg1; val.Character != 15 {
if val := mockCodeNavService.NewGetDefinitionsFunc.History()[0].Arg1; val.Character != 15 {
t.Fatalf("unexpected character. want=%d have=%v", 15, val)
}
}
@@ -112,8 +112,8 @@ func TestReferences(t *testing.T) {
)

offset := int32(25)
mockRefCursor := codenav.ReferencesCursor{Phase: "local"}
encodedCursor := encodeReferencesCursor(mockRefCursor)
mockRefCursor := codenav.Cursor{Phase: "local"}
encodedCursor := encodeTraversalCursor(mockRefCursor)
mockCursor := base64.StdEncoding.EncodeToString([]byte(encodedCursor))

args := &resolverstubs.LSIFPagedQueryPositionArgs{
@@ -128,19 +128,19 @@ func TestReferences(t *testing.T) {
t.Fatalf("unexpected error: %s", err)
}

if len(mockCodeNavService.GetReferencesFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockCodeNavService.GetReferencesFunc.History()))
if len(mockCodeNavService.NewGetReferencesFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockCodeNavService.NewGetReferencesFunc.History()))
}
if val := mockCodeNavService.GetReferencesFunc.History()[0].Arg1; val.Line != 10 {
if val := mockCodeNavService.NewGetReferencesFunc.History()[0].Arg1; val.Line != 10 {
t.Fatalf("unexpected line. want=%v have=%v", 10, val)
}
if val := mockCodeNavService.GetReferencesFunc.History()[0].Arg1; val.Character != 15 {
if val := mockCodeNavService.NewGetReferencesFunc.History()[0].Arg1; val.Character != 15 {
t.Fatalf("unexpected character. want=%v have=%v", 15, val)
}
if val := mockCodeNavService.GetReferencesFunc.History()[0].Arg1; val.Limit != 25 {
if val := mockCodeNavService.NewGetReferencesFunc.History()[0].Arg1; val.Limit != 25 {
t.Fatalf("unexpected character. want=%v have=%v", 25, val)
}
if val := mockCodeNavService.GetReferencesFunc.History()[0].Arg1; val.RawCursor != encodedCursor {
if val := mockCodeNavService.NewGetReferencesFunc.History()[0].Arg1; val.RawCursor != encodedCursor {
t.Fatalf("unexpected character. want=%v have=%v", "test-cursor", val)
}
}
@@ -176,10 +176,10 @@ func TestReferencesDefaultLimit(t *testing.T) {
t.Fatalf("unexpected error: %s", err)
}

if len(mockCodeNavService.GetReferencesFunc.History()) != 1 {
if len(mockCodeNavService.NewGetReferencesFunc.History()) != 1 {
t.Fatalf("unexpected call count. want=%d have=%d", 1, len(mockCodeNavService.GetDiagnosticsFunc.History()))
}
if val := mockCodeNavService.GetReferencesFunc.History()[0].Arg1; val.Limit != DefaultReferencesPageSize {
if val := mockCodeNavService.NewGetReferencesFunc.History()[0].Arg1; val.Limit != DefaultReferencesPageSize {
t.Fatalf("unexpected limit. want=%v have=%v", DefaultReferencesPageSize, val)
}
}

@@ -154,7 +154,6 @@ server_integration_test(
"SOURCEGRAPH_LICENSE_GENERATION_KEY",
"SOURCEGRAPH_LICENSE_KEY",
],
flaky = True,
port = "7083",
runner_src = ":codeintel_integration_test.sh",
tags = [