Mirror of https://github.com/sourcegraph/sourcegraph.git (synced 2026-02-06 14:51:44 +00:00)
bzl: bazelify docsite and doc/cli/reference generation (#54538)
This PR is on top of #54517

---

This PR reworks how we handle the docsite and some of its related tasks:

- While we _could_ avoid committing the generated files to disk, the way docsite currently works requires all files to be available in git, so the docsite can serve various versions.
- `bazel run //doc/cli/references:write_doc_files` handles this (as does its umbrella target `//dev:write_all_generated`).
- Generation is now handled by `//doc/cli/references:generate_doc` and its associated test.
- `sg run docsite` now calls `bazel run //doc:serve`, so we no longer need to install docsite directly (though it's still available under `bazel run //dev/tools:docsite` if needed).

As a byproduct, we can now run `src-cli` with `bazel run //dev/tools:src-cli`, which transparently picks the correct binary for your machine.

## Test plan

<!-- All pull requests REQUIRE a test plan: https://docs.sourcegraph.com/dev/background-information/testing_principles -->

Locally tested + CI
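For quick reference, a sketch of the day-to-day commands this change enables, based only on the targets named above (the trailing `version` argument in the last example is illustrative):

```
# Serve the docsite locally; `sg run docsite` now delegates to this target.
bazel run //doc:serve

# Regenerate the committed doc/cli/references Markdown files,
# or refresh every committed generated file via the umbrella target.
bazel run //doc/cli/references:write_doc_files
bazel run //dev:write_all_generated

# Run the platform-appropriate docsite or src-cli binary directly.
bazel run //dev/tools:docsite
bazel run //dev/tools:src-cli -- version
```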
Parent: 7e8690f6be
Commit: 588463afbb
WORKSPACE: 15 lines changed
@@ -91,21 +91,6 @@ http_archive(
    ],
)

SRC_CLI_VERSION = "5.0.3"

http_archive(
    name = "src-cli-linux-amd64",
    build_file_content = """
filegroup(
    name = "src-cli-linux-amd64",
    srcs = ["src"],
    visibility = ["//visibility:public"],
)
""",
    sha256 = "d125d732ad4c47ae6977c49574b01cc1b3c943b2a2108142267438e829538aa3",
    url = "https://github.com/sourcegraph/src-cli/releases/download/{0}/src-cli_{0}_linux_amd64.tar.gz".format(SRC_CLI_VERSION),
)

http_archive(
    name = "container_structure_test",
    sha256 = "42edb647b51710cb917b5850380cc18a6c925ad195986f16e3b716887267a2d7",
dev/BUILD.bazel (generated): 1 line changed
@@ -13,5 +13,6 @@ write_source_files(
    additional_update_targets = [
        "//lib/codeintel/lsif/protocol:write_symbol_kind",
        "//lib/codeintel/lsif/protocol:write_symbol_tag",
        "//doc/cli/references:write_doc_files",
    ],
)
@@ -23,6 +23,7 @@ load("test_release_version.bzl", "MINIMUM_UPGRADEABLE_VERSION", "MINIMUM_UPGRADE
load("flakes.bzl", "FLAKES")

load("@bazel_gazelle//:deps.bzl", "go_repository")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")

# Shell snippet to disable a test on the fly. Needs to be formatted before being used.

@@ -113,7 +114,6 @@ def back_compat_defs():
        version = "v0.0.0-20230620185637-63241cb1b17a",
    )

    # Now that we have declared a replacement for the two problematic go packages that
    # @sourcegraph_back_compat depends on, we can define the repository itself. Because it
    # comes with its Bazel rules (logical, that's just the current repository but with a different
@@ -563,26 +563,6 @@ var installFuncs = map[string]installFunc{

		return download.ArchivedExecutable(ctx, url, target, fmt.Sprintf("%s/jaeger-all-in-one", archiveName))
	},
	"installDocsite": func(ctx context.Context, env map[string]string) error {
		version := env["DOCSITE_VERSION"]
		if version == "" {
			return errors.New("could not find DOCSITE_VERSION in env")
		}
		root, err := root.RepositoryRoot()
		if err != nil {
			return err
		}
		target := filepath.Join(root, fmt.Sprintf(".bin/docsite_%s", version))
		if _, err := os.Stat(target); err == nil {
			return nil
		} else if !os.IsNotExist(err) {
			return err
		}
		archiveName := fmt.Sprintf("docsite_%s_%s_%s", version, runtime.GOOS, runtime.GOARCH)
		url := fmt.Sprintf("https://github.com/sourcegraph/docsite/releases/download/%s/%s", version, archiveName)
		_, err = download.Executable(ctx, url, target, false)
		return err
	},
}

// makeEnv merges environments starting from the left, meaning the first environment will be overriden by the second one, skipping
@@ -1,8 +1,18 @@
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file", "http_archive")

DOCSITE_VERSION="1.9.2"
DOCSITE_VERSION = "1.9.2"
SRC_CLI_VERSION = "5.1.0"

SRC_CLI_BUILDFILE = """
filegroup(
    name = "src-cli-{}",
    srcs = ["src"],
    visibility = ["//visibility:public"],
)
"""

def tool_deps():
    # Docsite #
    http_file(
        name = "docsite_darwin_amd64",
        urls = ["https://github.com/sourcegraph/docsite/releases/download/v{0}/docsite_v{0}_darwin_amd64".format(DOCSITE_VERSION)],

@@ -24,3 +34,25 @@ def tool_deps():
        executable = True,
    )

    # src-cli #
    http_archive(
        name = "src-cli-linux-amd64",
        build_file_content = SRC_CLI_BUILDFILE.format("linux-amd64"),
        sha256 = "270ddad7748c1b76f082b637e336b5c7a58af76d207168469f4b7bef957953e3",
        url = "https://github.com/sourcegraph/src-cli/releases/download/{0}/src-cli_{0}_linux_amd64.tar.gz".format(SRC_CLI_VERSION),
    )

    http_archive(
        name = "src-cli-darwin-amd64",
        build_file_content = SRC_CLI_BUILDFILE.format("darwin-amd64"),
        sha256 = "f14414e3ff4759cd1fbed0107138214f87d9a69cdb55ed1c4522704069420d9b",
        url = "https://github.com/sourcegraph/src-cli/releases/download/{0}/src-cli_{0}_darwin_amd64.tar.gz".format(SRC_CLI_VERSION),
    )

    http_archive(
        name = "src-cli-darwin-arm64",
        build_file_content = SRC_CLI_BUILDFILE.format("darwin-arm64"),
        sha256 = "93dc6c8522792ea16e3c8c81c8cf655a908118e867fda43c048c9b51f4c70e88",
        url = "https://github.com/sourcegraph/src-cli/releases/download/{0}/src-cli_{0}_darwin_arm64.tar.gz".format(SRC_CLI_VERSION),
    )
dev/tools/BUILD.bazel (generated): 12 lines changed
@@ -5,5 +5,15 @@ sh_binary(
        "@bazel_tools//src/conditions:darwin_arm64": ["@docsite_darwin_arm64//file:downloaded"],
        "@bazel_tools//src/conditions:linux_x86_64": ["@docsite_linux_amd64//file:downloaded"],
    }),
    visibility = ["//doc:__pkg__"],
    visibility = ["//visibility:public"],
)

sh_binary(
    name = "src-cli",
    srcs = select({
        "@bazel_tools//src/conditions:darwin_x86_64": ["@src-cli-darwin-amd64//:src-cli-darwin-amd64"],
        "@bazel_tools//src/conditions:darwin_arm64": ["@src-cli-darwin-arm64//:src-cli-darwin-arm64"],
        "@bazel_tools//src/conditions:linux_x86_64": ["@src-cli-linux-amd64//:src-cli-linux-amd64"],
    }),
    visibility = ["//visibility:public"],
)
doc/BUILD.bazel (generated): 13 lines changed
@@ -15,3 +15,16 @@ sh_test(
        "requires-network",
    ],
)

sh_binary(
    name = "serve",
    srcs = ["serve.sh"],
    args = ["$(location //dev/tools:docsite)"],
    data = [
        "//dev/tools:docsite",
        "//doc/cli/references:doc_files",
    ] + glob(
        ["**/*"],
        ["test.sh"],
    ),
)
doc/cli/references/BUILD.bazel (generated): 130 lines changed
@@ -1,24 +1,122 @@
load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")

go_library(
    name = "references_lib",
    srcs = ["doc.go"],
    importpath = "github.com/sourcegraph/sourcegraph/doc/cli/references",
    visibility = ["//visibility:private"],
    deps = ["//lib/errors"],
)

go_binary(
    name = "references",
    embed = [":references_lib"],
    visibility = ["//visibility:public"],
)
load("@aspect_bazel_lib//lib:directory_path.bzl", "make_directory_path")
load("@aspect_bazel_lib//lib:copy_to_directory.bzl", "copy_to_directory")
load("@aspect_bazel_lib//lib:write_source_files.bzl", "write_source_files")

filegroup(
    name = "doc_files",
    srcs = glob(
        ["**/*"],
        ["**/*.go"],
        [
            "**/*.go",
            "*.sh",
        ],
    ),
    visibility = ["//doc:__pkg__"],
)

OUTPUT_FILES = [
    "admin.md",
    "api.md",
    "batch/apply.md",
    "batch/exec.md",
    "batch/index.md",
    "batch/new.md",
    "batch/preview.md",
    "batch/remote.md",
    "batch/repositories.md",
    "batch/validate.md",
    "code-intel/index.md",
    "code-intel/upload.md",
    "codeowners.md",
    "config/edit.md",
    "config/get.md",
    "config/index.md",
    "config/list.md",
    "debug.md",
    "extensions/copy.md",
    "extensions/delete.md",
    "extensions/get.md",
    "extensions/index.md",
    "extensions/list.md",
    "extensions/publish.md",
    "extsvc/create.md",
    "extsvc/edit.md",
    "extsvc/index.md",
    "extsvc/list.md",
    "index.md",
    "login.md",
    "lsif.md",
    "orgs/create.md",
    "orgs/delete.md",
    "orgs/get.md",
    "orgs/index.md",
    "orgs/list.md",
    "orgs/members/add.md",
    "orgs/members/index.md",
    "orgs/members/remove.md",
    "repos/add-metadata.md",
    "repos/delete-metadata.md",
    "repos/delete.md",
    "repos/get.md",
    "repos/index.md",
    "repos/list.md",
    "repos/update-metadata.md",
    "scout.md",
    "search.md",
    "serve-git.md",
    "snapshot.md",
    "teams.md",
    "users/create.md",
    "users/delete.md",
    "users/get.md",
    "users/index.md",
    "users/list.md",
    "users/prune.md",
    "users/tag.md",
    "validate.md",
    "version.md",
]

genrule(
    name = "generate_doc",
    outs = OUTPUT_FILES,
    cmd_bash = "GOMAXPROCS=8 USER=user HOME=/home/user $(execpath //dev/tools:src-cli) doc -o=$(RULEDIR)",
    tools = ["//dev/tools:src-cli"],
    visibility = ["//visibility:public"],
)

sh_test(
    name = "generate_doc_test",
    size = "small",
    srcs = ["generate_doc_test.sh"],
    args = [
        "$(location //dev/tools:src-cli)",
        "$(locations :generate_doc)",
    ],
    data = [
        ":generate_doc",
        "//dev/tools:src-cli",
    ],
)

# We use a copy_to_directory macro so write_source_files inputs and outputs are not at the same
# path, which enables the write_doc_files_diff_test to work.
copy_to_directory(
    name = "files_copy",
    srcs = [":generate_doc"],
)

write_source_files(
    name = "write_doc_files",
    files = {
        out: make_directory_path(
            out + "_directory_path",
            "files_copy",
            out,
        )
        for out in OUTPUT_FILES
    },
    suggested_update_target = "//dev:write_all_generated",
    tags = ["go_generate"],
    visibility = ["//visibility:public"],
)
doc/cli/references/admin.md (new file): 22 lines
@@ -0,0 +1,22 @@
# `src admin`


## Usage

```
'src admin' is a tool that manages an initial admin user on a new Sourcegraph instance.

Usage:

	src admin create [command options]

The commands are:

	create	create an initial admin user

Use "src admin [command] -h" for more information about a command.


```
@@ -1,171 +0,0 @@
package main

//go:generate go run ./doc.go

import (
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"io/fs"
	"log"
	"net/http"
	"os"
	"os/exec"
	"path"
	"path/filepath"
	"runtime"
	"sort"

	"github.com/sourcegraph/sourcegraph/lib/errors"
)

func clean(base string) error {
	// Delete every Markdown file that we find, and track the directories that
	// exist.
	dirs := []string{}
	if err := filepath.Walk(base, func(fp string, info fs.FileInfo, err error) error {
		if info.IsDir() {
			dirs = append(dirs, fp)
		} else if path.Ext(fp) == ".md" {
			return os.Remove(fp)
		}

		return nil
	}); err != nil {
		return errors.Wrap(err, "error walking Markdown files")
	}

	// Now iterate over the directories depth-first, removing the ones that are
	// empty.
	sort.Slice(dirs, func(i, j int) bool {
		return len(dirs[j]) < len(dirs[i])
	})
	for _, dir := range dirs {
		d, err := os.ReadDir(dir)
		if err != nil {
			log.Fatal(err)
		}

		if len(d) == 0 {
			if err := os.Remove(dir); err != nil {
				return errors.Wrapf(err, "error removing directory %q", dir)
			}
		}
	}

	return nil
}

func get(url string, v any) error {
	resp, err := http.Get(url)
	if err != nil {
		return errors.Wrapf(err, "http get: %s", url)
	}
	defer resp.Body.Close()

	b, err := io.ReadAll(resp.Body)
	if err != nil {
		return errors.Wrapf(err, "http read: %s", url)
	}

	err = json.Unmarshal(b, v)
	if err != nil {
		return errors.Wrapf(err, "http json unmarshal: %s", url)
	}
	return nil
}

func fetchBinary() (string, error, func()) {
	fmt.Printf("fetching latest src-cli binary...\n")
	dir, err := os.MkdirTemp("", "src-cli-doc-gen")
	if err != nil {
		return "", errors.Wrap(err, "creating temporary directory"), func() {}
	}
	cleanup := func() {
		os.RemoveAll(dir)
	}

	release := struct {
		Name   string
		Assets []struct {
			Name string
			URL  string `json:"browser_download_url"`
		}
	}{}
	if err := get("https://api.github.com/repos/sourcegraph/src-cli/releases/latest", &release); err != nil {
		return "", errors.Wrap(err, "src-cli release metadata"), cleanup
	}

	bin := fmt.Sprintf("src_%s_%s", runtime.GOOS, runtime.GOARCH)
	url := ""
	for _, asset := range release.Assets {
		if bin == asset.Name {
			url = asset.URL
			break
		}
	}

	if url == "" {
		return "", errors.Newf("failed to find %s for src-cli release %s", bin, release.Name), cleanup
	}

	// more succinct to use curl than pipe http.Get into file
	src := filepath.Join(dir, bin)
	srcGet := exec.Command("curl", "-L", "-o", src, url)
	if _, err := srcGet.Output(); err != nil {
		return "", errors.Wrap(err, "src-cli download"), cleanup
	}

	if err := os.Chmod(src, 0700); err != nil {
		return "", errors.Wrap(err, "src-cli mark executable"), cleanup
	}

	return src, nil, cleanup
}

func generateDocs(binaryPath string) error {
	fmt.Println("generating docs...")
	srcDoc := exec.Command(binaryPath, "doc", "-o", ".")
	srcDoc.Env = os.Environ()
	// Always set this to 8 so the docs don't change when generated on
	// different machines.
	srcDoc.Env = append(srcDoc.Env, "GOMAXPROCS=8")
	if out, err := srcDoc.CombinedOutput(); err != nil {
		return errors.Wrapf(err, "running src doc:\n%s\n", string(out))
	}
	return nil
}

var srcCliBinaryPath = flag.String("binaryPath", "", "Optional path to a src-cli binary. If not provided the latest release of src-cli will be downloaded. This is primarily useful for automation and not generally for users.")

func main() {
	flag.Parse()

	wd, err := os.Getwd()
	if err != nil {
		log.Fatalf("error getting working directory: %v", err)
	}

	if err := clean(wd); err != nil {
		log.Fatalf("error cleaning working directory: %v", err)
	}

	var binaryPath string
	if len(*srcCliBinaryPath) == 0 {
		// empty path means we need to generate it
		cliPath, err, cleanup := fetchBinary()
		if err != nil {
			log.Fatalf("error downloading src-cli binary: %v", err)
		}
		binaryPath = cliPath
		defer cleanup()
	} else {
		binaryPath = *srcCliBinaryPath
	}
	fmt.Printf("Using src-cli path: %s", binaryPath)

	if err := generateDocs(binaryPath); err != nil {
		log.Fatalf("error generating documentation: %v:", err)
	}
}
doc/cli/references/generate_doc_test.sh (new executable file): 38 lines
@@ -0,0 +1,38 @@
#!/usr/bin/env bash

set -e

src_bin="$1"

# Array of paths for each of the outputs from the :generate_doc target.
# shellcheck disable=SC2124
got_files="${@:2}"

# Manually run src-cli doc again, so have a list of all the files
# we expect the :generate_doc target to output.
#
# We put them in the ./expected folder.
USER=nobody HOME=. "$src_bin" doc -o=expected/

while IFS= read -r -d '' file
do
  want="${file##expected}"
  found="false"

  # Loop over all files we got.
  # shellcheck disable=SC2068
  for got in ${got_files[@]}; do
    # Trim the path from the "monitoring/output" prefix
    # and test it against the expected file we're currently iterating with.
    if [[ "${got##doc/cli/references}" == "$want" ]]; then
      found="true"
      break
    fi
  done

  # If we didn't find it, return an error.
  if [[ $found == "false" ]]; then
    echo "Couldn't find expected output $want, perhaps it's missing from the 'srcs' attribute?"
    exit 1
  fi
done < <(find expected -name "*.md" -print0)
@@ -3,6 +3,7 @@

## Subcommands


* [`admin`](admin.md)
* [`api`](api.md)
* [`batch`](batch/index.md)
* [`code-intel`](code-intel/index.md)

@@ -15,6 +16,7 @@
* [`lsif`](lsif.md)
* [`orgs`](orgs/index.md)
* [`repos`](repos/index.md)
* [`scout`](scout.md)
* [`search`](search.md)
* [`serve-git`](serve-git.md)
* [`snapshot`](snapshot.md)
@@ -8,11 +8,12 @@
| `-dump-requests` | Log GraphQL requests and responses to stdout | `false` |
| `-get-curl` | Print the curl command for executing this query and exit (WARNING: includes printing your access token!) | `false` |
| `-insecure-skip-verify` | Skip validation of TLS certificates against trusted chains | `false` |
| `-key` | The name of the key to add (required) | |
| `-repo` | The ID of the repo to add the key-value pair to (required) | |
| `-key` | The name of the metadata key to add (required) | |
| `-repo` | The ID of the repo to add the key-value pair metadata to (required if -repo-name is not specified) | |
| `-repo-name` | The name of the repo to add the key-value pair metadata to (required if -repo is not specified) | |
| `-trace` | Log the trace ID for requests. See https://docs.sourcegraph.com/admin/observability/tracing | `false` |
| `-user-agent-telemetry` | Include the operating system and architecture in the User-Agent sent with requests to Sourcegraph | `true` |
| `-value` | The value associated with the key. Defaults to null. | |
| `-value` | The metadata value associated with the metadata key. Defaults to null. | |


## Usage

@@ -26,15 +27,17 @@ Usage of 'src repos add-metadata':
  -insecure-skip-verify
        Skip validation of TLS certificates against trusted chains
  -key string
        The name of the key to add (required)
        The name of the metadata key to add (required)
  -repo string
        The ID of the repo to add the key-value pair to (required)
        The ID of the repo to add the key-value pair metadata to (required if -repo-name is not specified)
  -repo-name string
        The name of the repo to add the key-value pair metadata to (required if -repo is not specified)
  -trace
        Log the trace ID for requests. See https://docs.sourcegraph.com/admin/observability/tracing
  -user-agent-telemetry
        Include the operating system and architecture in the User-Agent sent with requests to Sourcegraph (default true)
  -value string
        The value associated with the key. Defaults to null.
        The metadata value associated with the metadata key. Defaults to null.

Examples:

@@ -44,6 +47,8 @@ Examples:

  Omitting -value will create a tag (a key with a null value).

  [DEPRECATED] Note that 'add-kvp' is deprecated and will be removed in future release. Use 'add-metadata' instead.


```
@@ -8,8 +8,9 @@
| `-dump-requests` | Log GraphQL requests and responses to stdout | `false` |
| `-get-curl` | Print the curl command for executing this query and exit (WARNING: includes printing your access token!) | `false` |
| `-insecure-skip-verify` | Skip validation of TLS certificates against trusted chains | `false` |
| `-key` | The name of the key to be deleted (required) | |
| `-repo` | The ID of the repo with the key-value pair to be deleted (required) | |
| `-key` | The name of the metadata key to be deleted (required) | |
| `-repo` | The ID of the repo with the key-value pair metadata to be deleted (required if -repo-name is not specified) | |
| `-repo-name` | The name of the repo to add the key-value pair metadata to (required if -repo is not specified) | |
| `-trace` | Log the trace ID for requests. See https://docs.sourcegraph.com/admin/observability/tracing | `false` |
| `-user-agent-telemetry` | Include the operating system and architecture in the User-Agent sent with requests to Sourcegraph | `true` |

@@ -25,9 +26,11 @@ Usage of 'src repos delete-metadata':
  -insecure-skip-verify
        Skip validation of TLS certificates against trusted chains
  -key string
        The name of the key to be deleted (required)
        The name of the metadata key to be deleted (required)
  -repo string
        The ID of the repo with the key-value pair to be deleted (required)
        The ID of the repo with the key-value pair metadata to be deleted (required if -repo-name is not specified)
  -repo-name string
        The name of the repo to add the key-value pair metadata to (required if -repo is not specified)
  -trace
        Log the trace ID for requests. See https://docs.sourcegraph.com/admin/observability/tracing
  -user-agent-telemetry

@@ -37,8 +40,9 @@ Examples:

  Delete a key-value pair metadata from a repository:

    $ src repos delete-metadata -repo=repoID -key=mykey
    $ src repos delete-metadata -repo=repoID -key=mykey

  [DEPRECATED] Note 'delete-kvp' is deprecated and will be removed in future release. Use 'delete-metadata' instead.


```
@@ -8,11 +8,12 @@
| `-dump-requests` | Log GraphQL requests and responses to stdout | `false` |
| `-get-curl` | Print the curl command for executing this query and exit (WARNING: includes printing your access token!) | `false` |
| `-insecure-skip-verify` | Skip validation of TLS certificates against trusted chains | `false` |
| `-key` | The name of the key to be updated (required) | |
| `-repo` | The ID of the repo with the key to be updated (required) | |
| `-key` | The name of the metadata key to be updated (required) | |
| `-repo` | The ID of the repo with the metadata key to be updated (required if -repo-name is not specified) | |
| `-repo-name` | The name of the repo to add the key-value pair metadata to (required if -repo is not specified) | |
| `-trace` | Log the trace ID for requests. See https://docs.sourcegraph.com/admin/observability/tracing | `false` |
| `-user-agent-telemetry` | Include the operating system and architecture in the User-Agent sent with requests to Sourcegraph | `true` |
| `-value` | The new value of the key to be set. Defaults to null. | |
| `-value` | The new metadata value of the metadata key to be set. Defaults to null. | |


## Usage

@@ -26,24 +27,28 @@ Usage of 'src repos update-metadata':
  -insecure-skip-verify
        Skip validation of TLS certificates against trusted chains
  -key string
        The name of the key to be updated (required)
        The name of the metadata key to be updated (required)
  -repo string
        The ID of the repo with the key to be updated (required)
        The ID of the repo with the metadata key to be updated (required if -repo-name is not specified)
  -repo-name string
        The name of the repo to add the key-value pair metadata to (required if -repo is not specified)
  -trace
        Log the trace ID for requests. See https://docs.sourcegraph.com/admin/observability/tracing
  -user-agent-telemetry
        Include the operating system and architecture in the User-Agent sent with requests to Sourcegraph (default true)
  -value string
        The new value of the key to be set. Defaults to null.
        The new metadata value of the metadata key to be set. Defaults to null.

Examples:

  Update the value metadata for a key on a repository:
  Update the metadata value for a metadata key on a repository:

    $ src repos update-metadata -repo=repoID -key=my-key -value=new-value

  Omitting -value will set the value of the key to null.

  [DEPRECATED] Note that 'update-kvp' is deprecated and will be removed in future release. Use 'update-metadata' instead.


```
doc/cli/references/scout.md (new file): 27 lines
@@ -0,0 +1,27 @@
# `src scout`


## Usage

```
'src scout' is a tool that provides monitoring for Sourcegraph resources

EXPERIMENTAL: 'scout' is an experimental command in the 'src' tool. To use, you must
point your .kube config to your Sourcegraph instance.

Usage:

	src scout command [command options]

The commands are:

	resource	print all known sourcegraph resources and their allocations
	usage		get CPU, memory and current disk usage
	advise		recommend lowering or raising resource allocations based on actual usage

Use "src scout [command] -h" for more information about a command.


```
doc/serve.sh (new executable file): 5 lines
@@ -0,0 +1,5 @@
#!/usr/bin/env bash

docsite_bin="$1"

"$docsite_bin" -config doc/docsite.json serve -http=localhost:5080
@@ -404,10 +404,7 @@ commands:

  docsite:
    description: Docsite instance serving the docs
    cmd: .bin/docsite_${DOCSITE_VERSION} -config doc/docsite.json serve -http=localhost:5080
    install_func: "installDocsite"
    env:
      DOCSITE_VERSION: v1.9.2 # Update in all places (including outside this repo)
    cmd: bazel run //doc:serve

  syntax-highlighter:
    ignoreStdout: true
||||
Loading…
Reference in New Issue
Block a user