codeintel-qa: Remove bash (#46300)

Eric Fritz 2023-01-11 07:47:50 -06:00 committed by GitHub
parent 9ec579babd
commit a9fdbf3181
18 changed files with 2195 additions and 1792 deletions

View File

@@ -28,11 +28,11 @@ pushd dev/codeintel-qa
echo "--- :brain: Running the test suite"
echo '--- :zero: downloading test data from GCS'
./scripts/download.sh
go run ./cmd/download
echo '--- :one: clearing existing state'
go run ./cmd/clear
echo '--- :two: integration test ./dev/codeintel-qa/cmd/upload'
go run ./cmd/upload --timeout=5m
echo '--- :three: integration test ./dev/codeintel-qa/cmd/query'
go run ./cmd/query -check-query-result=false # make queries but do not assert against expected locations
popd || exit 1
popd

View File

@@ -11,26 +11,28 @@ export SOURCEGRAPH_BASE_URL="${1:-"http://localhost:7080"}"
echo '--- initializing Sourcegraph instance'
pushd internal/cmd/init-sg || exit 1
pushd internal/cmd/init-sg
go build -o "${root_dir}/init-sg"
popd || exit 1
popd
pushd dev/ci/integration/code-intel || exit 1
pushd dev/ci/integration/code-intel
"${root_dir}/init-sg" initSG
# Disable `-x` to avoid printing secrets
set +x
# shellcheck disable=SC1091
source /root/.sg_envrc
"${root_dir}/init-sg" addRepos -config repos.json
popd || exit 1
popd
pushd dev/codeintel-qa || exit 1
echo '--- downloading test data from GCS'
./scripts/download.sh
echo '--- integration test ./dev/codeintel-qa/cmd/upload'
go build ./cmd/upload
./upload --timeout=5m -verbose
echo '--- integration test ./dev/codeintel-qa/cmd/query'
go build ./cmd/query
./query -verbose
popd || exit 1
pushd dev/codeintel-qa
echo "--- :brain: Running the test suite"
echo '--- :zero: downloading test data from GCS'
go run ./cmd/download
echo '--- :one: clearing existing state'
go run ./cmd/clear
echo '--- :two: integration test ./dev/codeintel-qa/cmd/upload'
go run ./cmd/upload --timeout=5m -verbose
echo '--- :three: integration test ./dev/codeintel-qa/cmd/query'
go run ./cmd/query -verbose
popd

View File

@@ -8,7 +8,8 @@ Ensure that the following tools are available on your path:
- [`src`](https://github.com/sourcegraph/src-cli)
- [`lsif-go`](https://github.com/sourcegraph/lsif-go)
- [`gsutil`](https://cloud.google.com/storage/docs/gsutil_install) (and authenticated to the `sourcegraph-dev` project)
You should have environment variables that authenticate you to the `sourcegraph-dev` GCS project if you plan to upload or download index files (as we do in CI).
Set:
@@ -33,13 +34,13 @@ SOURCEGRAPH_SUDO_TOKEN=<YOUR SOURCEGRAPH API ACCESS TOKEN>
2. Download the test indexes by running the following command:
```
./scripts/download.sh
go run ./cmd/download
```
Alternatively, generate them by running the following command (this takes much longer):
```
./scripts/clone-and-index.sh
go run ./cmd/clone-and-index
```
If there is previous upload or index state on the target instance, it can be cleared by running the following command:
@@ -67,13 +68,13 @@ If there is a change to an indexer that needs to be tested, the indexes can be r
Generate indexes by running the following command:
```
./scripts/clone-and-index.sh
go run ./cmd/clone-and-index
```
Upload the generated indexes by running the following command:
Upload the generated indexes to GCS by running the following command:
```
./scripts/upload.sh
go run ./cmd/upload-gcs
```
Or if you just want to test an indexer change locally, you can:
@@ -82,4 +83,4 @@ Or if you just want to test an indexer change locally, you can:
rm -rf testdata/indexes/
```
Then rerun the testing steps described above (starting at `clone-and-index.sh`)
Then run the `clone-and-index` step described above.

View File

@@ -0,0 +1,230 @@
package main
import (
"context"
"fmt"
"os"
"path/filepath"
"github.com/sourcegraph/conc/pool"
"github.com/sourcegraph/run"
"github.com/sourcegraph/sourcegraph/dev/codeintel-qa/internal"
"github.com/sourcegraph/sourcegraph/dev/sg/root"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
func main() {
if err := mainErr(context.Background()); err != nil {
fmt.Printf("%s error: %s\n", internal.EmojiFailure, err.Error())
os.Exit(1)
}
}
const (
relativeReposDir = "dev/codeintel-qa/testdata/repos"
relativeIndexesDir = "dev/codeintel-qa/testdata/indexes"
)
var repositoryMeta = []struct {
org string
name string
indexer string
revisions []string
}{
// This repository has not been changed from its upstream
{
org: "sourcegraph-testing",
name: "zap",
indexer: "lsif-go",
revisions: []string{
"a6015e13fab9b744d96085308ce4e8f11bad1996",
"2aa9fa25da83bdfff756c36a91442edc9a84576c",
},
},
// Each commit here is tagged as sg-test-1, sg-test-2, and sg-test-3, respectively. See CHANGES.md in the root of the
// repository's master branch to see a history of changes and which revisions were targeted. We specifically use replace
// directives in the project root's go.mod file to target sourcegraph-testing/zap, which has no changes of its own. This
// simulates how common forking works in the Go ecosystem (see our own use of zoekt).
//
// To ensure that the last commit in the list for each repository is visible at tip, the master branch's last commit is
// a merge commit between the true upstream tip and sg-test-3.
{
org: "sourcegraph-testing",
name: "etcd",
indexer: "lsif-go",
revisions: []string{
"4397ceb9c11be0b3e9ee0111230235c868ba581d",
"bc588b7a2e9af4f903396cdcf66f56190b9e254f",
"ad7848014a051dbe3fcd6a4cff2c7befdd16d5a8",
},
},
{
org: "sourcegraph-testing",
name: "tidb",
indexer: "lsif-go",
revisions: []string{
"8eaaa098b4e938b18485f7b1fa7d8e720b04c699",
"b5f100a179e20d5539e629bd0919d05774cb7c6a",
"9aab49176993f9dc0ed2fcb9ef7e5125518e8b98",
},
},
{
org: "sourcegraph-testing",
name: "titan",
indexer: "lsif-go",
revisions: []string{
"fb38de395ba67f49978b218e099de1c45122fb50",
"415ffd5a3ba7a92a07cd96c7d9f4b734f61248f7",
"f8307e394c512b4263fc0cd67ccf9fd46f1ad9a5",
},
},
}
func mainErr(ctx context.Context) error {
if err := cloneAll(ctx); err != nil {
return err
}
if err := indexAll(ctx); err != nil {
return err
}
return nil
}
func cloneAll(ctx context.Context) error {
p := pool.New().WithErrors()
for _, meta := range repositoryMeta {
org, name := meta.org, meta.name
p.Go(func() error { return clone(ctx, org, name) })
}
return p.Wait()
}
func clone(ctx context.Context, org, name string) error {
repoRoot, err := root.RepositoryRoot()
if err != nil {
return err
}
reposDir := filepath.Join(repoRoot, relativeReposDir)
if ok, err := internal.FileExists(filepath.Join(reposDir, name)); err != nil {
return err
} else if ok {
fmt.Printf("Repository %q already cloned\n", name)
return nil
}
fmt.Printf("Cloning %q\n", name)
if err := os.MkdirAll(reposDir, os.ModePerm); err != nil {
return err
}
if err := run.Bash(ctx, "git", "clone", fmt.Sprintf("https://github.com/%s/%s.git", org, name)).Dir(reposDir).Run().Wait(); err != nil {
return err
}
fmt.Printf("Finished cloning %q\n", name)
return nil
}
func indexAll(ctx context.Context) error {
repoRoot, err := root.RepositoryRoot()
if err != nil {
return err
}
reposDir := filepath.Join(repoRoot, relativeReposDir)
indexesDir := filepath.Join(repoRoot, relativeIndexesDir)
if err := os.MkdirAll(indexesDir, os.ModePerm); err != nil {
return err
}
p := pool.New().WithErrors()
for _, meta := range repositoryMeta {
org, name, indexer, revisions := meta.org, meta.name, meta.indexer, meta.revisions
pair, ok := indexFunMap[indexer]
if !ok {
panic(fmt.Sprintf("unknown language %q", indexer))
}
p.Go(func() error {
for i, revision := range revisions {
revision := revision
targetFile := filepath.Join(indexesDir, fmt.Sprintf("%s.%s.%d.%s.%s", org, name, i, revision, pair.Extension))
if err := pair.IndexFunc(ctx, reposDir, targetFile, name, revision); err != nil {
return errors.Wrapf(err, "failed to index %s@%s", name, revision)
}
}
return nil
})
}
return p.Wait()
}
type IndexerPair struct {
Extension string
IndexFunc func(context.Context, string, string, string, string) error
}
var indexFunMap = map[string]IndexerPair{
"lsif-go": {"dump", indexGoWithLSIF},
}
func indexGoWithLSIF(ctx context.Context, reposDir, targetFile, name, revision string) error {
return indexGeneric(ctx, reposDir, targetFile, name, revision, func(repoCopyDir string) error {
if err := run.Bash(ctx, "go", "mod", "tidy").Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
if err := run.Bash(ctx, "go", "mod", "vendor").Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
// --repository-root=. is necessary here as the temp dir might be within a strange
// nest of symlinks on MacOS, which confuses the repository root detection in lsif-go.
if err := run.Bash(ctx, "lsif-go", "--repository-root=.", "-o", targetFile).Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
return nil
})
}
func indexGeneric(ctx context.Context, reposDir, targetFile, name, revision string, index func(repoCopyDir string) error) error {
if ok, err := internal.FileExists(targetFile); err != nil {
return err
} else if ok {
fmt.Printf("Index for %s@%s already exists\n", name, revision)
return nil
}
fmt.Printf("Indexing %s@%s\n", name, revision)
tempDir, err := os.MkdirTemp("", "codeintel-qa")
if err != nil {
return err
}
defer os.RemoveAll(tempDir)
repoDir := filepath.Join(reposDir, name)
repoCopyDir := filepath.Join(tempDir, name)
if err := run.Bash(ctx, "cp", "-r", repoDir, tempDir).Run().Wait(); err != nil {
return err
}
if err := run.Bash(ctx, "git", "checkout", revision).Dir(repoCopyDir).Run().Wait(); err != nil {
return err
}
if err := index(repoCopyDir); err != nil {
return err
}
fmt.Printf("Finished indexing %s@%s\n", name, revision)
return nil
}
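
For orientation, here is a minimal sketch of the index filename produced by the `fmt.Sprintf` pattern in `indexAll` above, using zap's first revision from `repositoryMeta`. The `indexFileName` helper is illustrative only, not part of the command:

```
package main

import "fmt"

// indexFileName mirrors the naming pattern used in indexAll:
// <org>.<name>.<revision index>.<revision>.<extension>
func indexFileName(org, name string, i int, revision, extension string) string {
	return fmt.Sprintf("%s.%s.%d.%s.%s", org, name, i, revision, extension)
}

func main() {
	// Prints: sourcegraph-testing.zap.0.a6015e13fab9b744d96085308ce4e8f11bad1996.dump
	fmt.Println(indexFileName("sourcegraph-testing", "zap", 0, "a6015e13fab9b744d96085308ce4e8f11bad1996", "dump"))
}
```

This is the same layout that `cmd/upload` and the filename parser in `internal` (below) expect.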

View File

@@ -0,0 +1,126 @@
package main
import (
"compress/gzip"
"context"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"cloud.google.com/go/storage"
"github.com/sourcegraph/conc/pool"
"google.golang.org/api/iterator"
"github.com/sourcegraph/sourcegraph/dev/codeintel-qa/internal"
"github.com/sourcegraph/sourcegraph/dev/sg/root"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
func main() {
if err := mainErr(context.Background()); err != nil {
fmt.Printf("%s error: %s\n", internal.EmojiFailure, err.Error())
os.Exit(1)
}
}
const (
bucketName = "codeintel-qa-indexes"
relativeIndexesDir = "dev/codeintel-qa/testdata/indexes"
)
func mainErr(ctx context.Context) error {
client, err := storage.NewClient(ctx)
if err != nil {
return err
}
bucket := client.Bucket(bucketName)
paths, err := getPaths(ctx, bucket)
if err != nil {
return err
}
if err := downloadAll(ctx, bucket, paths); err != nil {
return err
}
return nil
}
func getPaths(ctx context.Context, bucket *storage.BucketHandle) (paths []string, _ error) {
objects := bucket.Objects(ctx, &storage.Query{})
for {
attrs, err := objects.Next()
if err != nil {
if err == iterator.Done {
break
}
return nil, err
}
paths = append(paths, attrs.Name)
}
return paths, nil
}
func downloadAll(ctx context.Context, bucket *storage.BucketHandle, paths []string) error {
repoRoot, err := root.RepositoryRoot()
if err != nil {
return err
}
indexesDir := filepath.Join(repoRoot, relativeIndexesDir)
if err := os.MkdirAll(indexesDir, os.ModePerm); err != nil {
return err
}
p := pool.New().WithErrors()
for _, path := range paths {
path := path
p.Go(func() error { return downloadIndex(ctx, bucket, indexesDir, path) })
}
return p.Wait()
}
func downloadIndex(ctx context.Context, bucket *storage.BucketHandle, indexesDir, name string) (err error) {
targetFile := filepath.Join(indexesDir, strings.TrimSuffix(name, ".gz"))
if ok, err := internal.FileExists(targetFile); err != nil {
return err
} else if ok {
fmt.Printf("Index %q already downloaded\n", name)
return nil
}
fmt.Printf("Downloading %q\n", name)
f, err := os.OpenFile(targetFile, os.O_WRONLY|os.O_CREATE, os.ModePerm)
if err != nil {
return err
}
defer func() { err = errors.Append(err, f.Close()) }()
r, err := bucket.Object(name).NewReader(ctx)
if err != nil {
return err
}
defer func() { err = errors.Append(err, r.Close()) }()
gzipReader, err := gzip.NewReader(r)
if err != nil {
return err
}
defer func() { err = errors.Append(err, gzipReader.Close()) }()
if _, err := io.Copy(f, gzipReader); err != nil {
return err
}
fmt.Printf("Finished downloading %q\n", name)
return nil
}

View File

@@ -19,7 +19,10 @@ func buildQueries() <-chan queryFunc {
defer close(fns)
for _, testCase := range testCases {
// Definition returns defintion
referencesWithDefinition := append([]Location{testCase.Definition}, testCase.References...)
sortLocations(referencesWithDefinition)
// Definition returns definition
fns <- makeTestFunc("def -> def", queryDefinitions, testCase.Definition, []Location{testCase.Definition})
// References return definition
@@ -27,13 +30,18 @@ func buildQueries() <-chan queryFunc {
fns <- makeTestFunc("refs -> def", queryDefinitions, reference, []Location{testCase.Definition})
}
// Definition returns references
fns <- makeTestFunc("def -> refs", queryReferences, testCase.Definition, testCase.References)
// Definition returns references (including definition)
fns <- makeTestFunc("def -> refs", queryReferences, testCase.Definition, referencesWithDefinition)
// References return references
if queryReferencesOfReferences {
for _, reference := range testCase.References {
fns <- makeTestFunc("refs -> refs", queryReferences, reference, testCase.References)
references := testCase.References
if reference.Repo == testCase.Definition.Repo && reference.Rev == testCase.Definition.Rev && reference.Path == testCase.Definition.Path {
references = referencesWithDefinition
}
fns <- makeTestFunc("refs -> refs", queryReferences, reference, references)
}
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,98 @@
package main
import (
"compress/gzip"
"context"
"fmt"
"io"
"os"
"path/filepath"
"cloud.google.com/go/storage"
"github.com/sourcegraph/conc/pool"
"github.com/sourcegraph/sourcegraph/dev/codeintel-qa/internal"
"github.com/sourcegraph/sourcegraph/dev/sg/root"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
func main() {
if err := mainErr(context.Background()); err != nil {
fmt.Printf("%s error: %s\n", internal.EmojiFailure, err.Error())
os.Exit(1)
}
}
const (
bucketName = "codeintel-qa-indexes"
relativeIndexesDir = "dev/codeintel-qa/testdata/indexes"
)
func mainErr(ctx context.Context) error {
repoRoot, err := root.RepositoryRoot()
if err != nil {
return err
}
indexesDir := filepath.Join(repoRoot, relativeIndexesDir)
names, err := getNames(ctx, indexesDir)
if err != nil {
return err
}
if err := uploadAll(ctx, indexesDir, names); err != nil {
return err
}
return nil
}
func getNames(ctx context.Context, indexesDir string) (names []string, _ error) {
entries, err := os.ReadDir(indexesDir)
if err != nil {
return nil, err
}
for _, entry := range entries {
names = append(names, entry.Name())
}
return names, nil
}
func uploadAll(ctx context.Context, indexesDir string, names []string) error {
client, err := storage.NewClient(ctx)
if err != nil {
return err
}
bucket := client.Bucket(bucketName)
p := pool.New().WithErrors()
for _, name := range names {
name := name
p.Go(func() error { return uploadIndex(ctx, bucket, indexesDir, name) })
}
return p.Wait()
}
func uploadIndex(ctx context.Context, bucket *storage.BucketHandle, indexesDir, name string) (err error) {
f, err := os.Open(filepath.Join(indexesDir, name))
if err != nil {
return err
}
defer f.Close()
w := bucket.Object(name + ".gz").NewWriter(ctx)
defer func() { err = errors.Append(err, w.Close()) }()
gzipWriter := gzip.NewWriter(w)
defer func() { err = errors.Append(err, gzipWriter.Close()) }()
if _, err := io.Copy(gzipWriter, f); err != nil {
return err
}
return nil
}

View File

@@ -4,7 +4,10 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"sync"
@@ -60,7 +63,7 @@ func uploadAll(ctx context.Context, commitsByRepo map[string][]string, limiter *
repoName: repoName,
commit: commit,
}
}(repoName, commit, fmt.Sprintf("%s.%d.%s.dump", repoName, i, commit))
}(repoName, commit, fmt.Sprintf("%s.%d.%s.dump", strings.Replace(repoName, "/", ".", 1), i, commit))
}
}
@@ -97,8 +100,32 @@ func upload(ctx context.Context, repoName, commit, file string) (string, error)
args = append(args, fmt.Sprintf("-%s=%s", k, v))
}
tempDir, err := os.MkdirTemp("", "codeintel-qa")
if err != nil {
return "", err
}
defer os.RemoveAll(tempDir)
src, err := os.Open(filepath.Join(indexDir, file))
if err != nil {
return "", err
}
defer src.Close()
dst, err := os.OpenFile(filepath.Join(tempDir, file), os.O_CREATE|os.O_WRONLY, os.ModePerm)
if err != nil {
return "", err
}
if _, err := io.Copy(dst, src); err != nil {
_ = dst.Close()
return "", err
}
if err := dst.Close(); err != nil {
return "", err
}
cmd := exec.CommandContext(ctx, "src", append([]string{"lsif", "upload", "-json"}, args...)...)
cmd.Dir = indexDir
cmd.Dir = tempDir
cmd.Env = []string{
fmt.Sprintf("SRC_ENDPOINT=%s", internal.SourcegraphEndpoint),
fmt.Sprintf("SRC_ACCESS_TOKEN=%s", internal.SourcegraphAccessToken),
@@ -106,7 +133,7 @@ func upload(ctx context.Context, repoName, commit, file string) (string, error)
output, err := cmd.CombinedOutput()
if err != nil {
return "", errors.Wrap(err, fmt.Sprintf("failed to upload index: %s", output))
return "", errors.Wrap(err, fmt.Sprintf("failed to upload index for %s@%s: %s", repoName, commit, output))
}
resp := struct {

View File

@@ -0,0 +1,15 @@
package internal
import "os"
func FileExists(path string) (bool, error) {
if _, err := os.Stat(path); err != nil {
if !os.IsNotExist(err) {
return false, err
}
return false, nil
}
return true, nil
}

View File

@@ -1,17 +1,17 @@
package internal
import (
"fmt"
"os"
"github.com/grafana/regexp"
)
var indexFilenamePattern = regexp.MustCompile(`^(.+)\.\d+\.([0-9A-Fa-f]{40})\.dump$`)
var indexFilenamePattern = regexp.MustCompile(`^([^.]+)\.([^.]+)\.\d+\.([0-9A-Fa-f]{40})\.dump$`)
// CommitsByRepo returns a map from repository name to a slice of commits for that repository.
// The repositories and commits are read from the filesystem state of the index directory
// supplied by the user. This method assumes that index files have been downloaded or generated
// locally.
// CommitsByRepo returns a map from org+repository name to a slice of commits for that repository. The
// repositories and commits are read from the filesystem state of the index directory supplied by the user.
// This method assumes that index files have been downloaded or generated locally.
func CommitsByRepo(indexDir string) (map[string][]string, error) {
infos, err := os.ReadDir(indexDir)
if err != nil {
@@ -21,7 +21,8 @@ func CommitsByRepo(indexDir string) (map[string][]string, error) {
commitsByRepo := map[string][]string{}
for _, info := range infos {
if matches := indexFilenamePattern.FindStringSubmatch(info.Name()); len(matches) > 0 {
commitsByRepo[matches[1]] = append(commitsByRepo[matches[1]], matches[2])
orgRepo := fmt.Sprintf("%s/%s", matches[1], matches[2])
commitsByRepo[orgRepo] = append(commitsByRepo[orgRepo], matches[3])
}
}
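
To make the new pattern concrete, here is a small sketch of how a filename in that layout is split into an org-qualified repository key. The filename is a hypothetical example, and the standard `regexp` package stands in for `github.com/grafana/regexp`:

```
package main

import (
	"fmt"
	"regexp"
)

var indexFilenamePattern = regexp.MustCompile(`^([^.]+)\.([^.]+)\.\d+\.([0-9A-Fa-f]{40})\.dump$`)

func main() {
	name := "sourcegraph-testing.zap.0.a6015e13fab9b744d96085308ce4e8f11bad1996.dump"
	matches := indexFilenamePattern.FindStringSubmatch(name)
	// matches[1] = org, matches[2] = repo, matches[3] = commit
	fmt.Printf("%s/%s -> %s\n", matches[1], matches[2], matches[3])
	// Output: sourcegraph-testing/zap -> a6015e13fab9b744d96085308ce4e8f11bad1996
}
```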

View File

@@ -1,10 +1,13 @@
package internal
import "context"
import (
"context"
)
// Limiter implements a counting semaphore.
type Limiter struct {
ch chan struct{}
concurrency int
ch chan struct{}
}
// NewLimiter creates a new limiter with the given maximum concurrency.
@@ -14,7 +17,7 @@ func NewLimiter(concurrency int) *Limiter {
ch <- struct{}{}
}
return &Limiter{ch: ch}
return &Limiter{concurrency, ch}
}
// Acquire blocks until it can acquire a value from the inner channel.
@@ -35,5 +38,10 @@ func (l *Limiter) Release() {
// Close closes the underlying channel.
func (l *Limiter) Close() {
// Drain the channel before close
for i := 0; i < l.concurrency; i++ {
<-l.ch
}
close(l.ch)
}
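
For context, here is a minimal usage sketch of the limiter as a counting semaphore. The concurrency value and the work done inside the goroutines are hypothetical; `Acquire`/`Release`/`Close` follow the semantics shown in the diff above:

```
package main

import (
	"fmt"
	"sync"

	"github.com/sourcegraph/sourcegraph/dev/codeintel-qa/internal"
)

func main() {
	// Allow at most 4 goroutines to run the guarded section at once.
	limiter := internal.NewLimiter(4)
	defer limiter.Close()

	var wg sync.WaitGroup
	for i := 0; i < 16; i++ {
		i := i
		wg.Add(1)
		go func() {
			defer wg.Done()
			limiter.Acquire()       // blocks until a slot is free
			defer limiter.Release() // return the slot for the next goroutine
			fmt.Println("processing", i)
		}()
	}
	wg.Wait()
}
```

Draining the channel in `Close` means the channel is only closed once every held slot has been released.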

View File

@@ -10,8 +10,7 @@ func TimeSince(start time.Time) time.Duration {
return time.Since(start) / time.Second * time.Second
}
// MakeTestRepoName returns the given repo name as a fully qualified repository name in the
// sourcegraph-testing GitHub organization.
func MakeTestRepoName(repoName string) string {
return fmt.Sprintf("github.com/%s/%s", "sourcegraph-testing", repoName)
// MakeTestRepoName returns the given repo name as a fully qualified repository name.
func MakeTestRepoName(orgAndRepoName string) string {
return fmt.Sprintf("github.com/%s", orgAndRepoName)
}
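
A quick illustrative call with the new org-qualified argument:

```
package main

import (
	"fmt"

	"github.com/sourcegraph/sourcegraph/dev/codeintel-qa/internal"
)

func main() {
	// Prints: github.com/sourcegraph-testing/zap
	fmt.Println(internal.MakeTestRepoName("sourcegraph-testing/zap"))
}
```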

View File

@@ -1,38 +0,0 @@
#!/usr/bin/env bash
set -eu
cd "$(dirname "${BASH_SOURCE[0]}")/../../.."
SCRIPTDIR=$(realpath './dev/codeintel-qa/scripts')
declare -A REVS=(
# This repository has not been changed
[zap]='a6015e13fab9b744d96085308ce4e8f11bad1996 2aa9fa25da83bdfff756c36a91442edc9a84576c'
# Each commit here is tagged as sg-test-1, sg-test-2, and sg-test-3, respectively. See CHANGES.md in the root of the
# repository's master branch to see a history of changes and which revisions were targeted. We specifically use replace
# directives in the project root's go.mod file to target sourcegraph-testing/zap, which has no changes of its own. This
# simulates how common forking works in the Go ecosystem (see our own use of zoekt).
#
# To ensure that the last commit in the list for each repository is visible at tip, the master branch's last commit is
# a merge commit between the true upstream tip and sg-test-3.
[etcd]='4397ceb9c11be0b3e9ee0111230235c868ba581d bc588b7a2e9af4f903396cdcf66f56190b9e254f ad7848014a051dbe3fcd6a4cff2c7befdd16d5a8'
[tidb]='8eaaa098b4e938b18485f7b1fa7d8e720b04c699 b5f100a179e20d5539e629bd0919d05774cb7c6a 9aab49176993f9dc0ed2fcb9ef7e5125518e8b98'
[titan]='fb38de395ba67f49978b218e099de1c45122fb50 415ffd5a3ba7a92a07cd96c7d9f4b734f61248f7 f8307e394c512b4263fc0cd67ccf9fd46f1ad9a5'
)
KEYS=()
VALS=()
IDXS=()
for k in "${!REVS[@]}"; do
i=0
for v in ${REVS[$k]}; do
KEYS+=("${k}")
VALS+=("${v}")
IDXS+=("${i}")
((i = i + 1))
done
done
./dev/ci/parallel_run.sh "${SCRIPTDIR}/clone.sh" {} ::: "${!REVS[@]}"
./dev/ci/parallel_run.sh "${SCRIPTDIR}/go-index.sh" {} {} ::: "${KEYS[@]}" :::+ "${IDXS[@]}" :::+ "${VALS[@]}"

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
set -eu
cd "$(dirname "${BASH_SOURCE[0]}")/../../.."
DATADIR=$(realpath './dev/codeintel-qa/testdata')
REPODIR="${DATADIR}/repos"
NAME="$1"
# Early-out if there's already a dump file
if [ -d "${REPODIR}/${NAME}" ]; then
exit 0
fi
# Ensure target dir exists
mkdir -p "${REPODIR}"
# Clone repo
pushd "${REPODIR}" || exit 1
git clone "https://github.com/sourcegraph-testing/${NAME}.git"

View File

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
# UGH
export CLOUDSDK_PYTHON=/usr/bin/python3
set -eu
cd "$(dirname "${BASH_SOURCE[0]}")/../../.."
DATADIR=$(realpath './dev/codeintel-qa/testdata')
INDEXDIR="${DATADIR}/indexes"
# Ensure target dir exists
mkdir -p "${INDEXDIR}"
# Download all compressed index files in parallel
gsutil -m cp gs://precise-code-intel-integration-testdata/* "${INDEXDIR}"
gunzip "${INDEXDIR}"/*

View File

@@ -1,38 +0,0 @@
#!/usr/bin/env bash
set -eux
cd "$(dirname "${BASH_SOURCE[0]}")/../../.."
DATADIR=$(realpath './dev/codeintel-qa/testdata')
REPODIR="${DATADIR}/repos"
INDEXDIR="${DATADIR}/indexes"
NAME="$1"
INDEX="$2"
REV="$3"
REVDIR="${REPODIR}/${NAME}-${REV}"
INDEXFILE="${INDEXDIR}/${NAME}.${INDEX}.${REV}.dump"
# Early-out if there's already a dump file
if [ -f "${INDEXFILE}" ]; then
exit 0
fi
# Ensure target dir exists
mkdir -p "${INDEXDIR}"
# Copy repo to temporary directory
cp -r "${REPODIR}/${NAME}" "${REVDIR}"
cleanup() {
rm -rf "${REVDIR}"
}
trap cleanup EXIT
# Check out revision
pushd "${REVDIR}" || exit 1
git checkout "${REV}" 2>/dev/null
# Index revision
go mod vendor && lsif-go -o "${INDEXFILE}"
V=$?
exit $V

View File

@@ -1,11 +0,0 @@
#!/usr/bin/env bash
set -eu
cd "$(dirname "${BASH_SOURCE[0]}")/../../.."
DATADIR=$(realpath './dev/codeintel-qa/testdata')
INDEXDIR="${DATADIR}/indexes"
# Compress and upload all index files
gzip -k "${INDEXDIR}"/*
gsutil cp "${INDEXDIR}"/*.gz gs://precise-code-intel-integration-testdata
rm "${INDEXDIR}"/*.gz