mirror of
https://github.com/sourcegraph/sourcegraph.git
synced 2026-02-06 19:21:50 +00:00
bzl: enact changes from rfc 793 (#53503)
See RFC 793

## Test plan

CI

---------

Co-authored-by: davejrt <davetry@gmail.com>
This commit is contained in:
parent 4e8e3a7907
commit abc77ba6a5
@@ -42,7 +42,6 @@ const (
 	ImagePatch // build a patched image after testing
 	ImagePatchNoTest // build a patched image without testing
-	CandidatesNoTest // build one or all candidate images without testing
 	ExecutorPatchNoTest // build executor image without testing

 	// Special test branches
@@ -170,10 +169,6 @@ func (t RunType) Matcher() *RunTypeMatcher {
 			Branch: "docker-images-patch-notest/",
 			BranchArgumentRequired: true,
 		}
-	case CandidatesNoTest:
-		return &RunTypeMatcher{
-			Branch: "docker-images-candidates-notest/",
-		}
 	case ExecutorPatchNoTest:
 		return &RunTypeMatcher{
 			Branch: "executor-patch-notest/",
@@ -226,8 +221,6 @@ func (t RunType) String() string {
 		return "Patch image"
 	case ImagePatchNoTest:
 		return "Patch image without testing"
-	case CandidatesNoTest:
-		return "Build all candidates without testing"
 	case ExecutorPatchNoTest:
 		return "Build executor without testing"
@@ -18,31 +18,31 @@ load("@rules_oci//oci:pull.bzl", "oci_pull")
 def oci_deps():
     oci_pull(
         name = "wolfi_base",
-        digest = "sha256:0ccfd730a1918144881d5cb312874ae20bac84bdf4a90613e7b433423a40161c",
+        digest = "sha256:8d80271a8d8f7b8fa7ff62b2e009ab3f0f81c5407872144db8cb30b396801853",
         image = "us.gcr.io/sourcegraph-dev/wolfi-sourcegraph-base",
     )

     oci_pull(
         name = "wolfi_cadvisor_base",
-        digest = "sha256:afb1c1179db7f89114dfc402495c34b3cf79a53e914a6f85fdd383d1808a570e",
+        digest = "sha256:2e5aec9ba5a4835b4c35103bd27ad2ad3e65a064ec5001a35168044dd8c06a4a",
         image = "us.gcr.io/sourcegraph-dev/wolfi-cadvisor-base",
     )

     oci_pull(
         name = "wolfi_symbols_base",
-        digest = "sha256:2472c260bd024dd6d92bd1f3dddd0783f2e9e26e2cd325f74cb8c97f279b1ce0",
+        digest = "sha256:a5d6a10698466e1a7198ca17e41a3c6c8cd7228ae562352abbdac829e539fc75",
         image = "us.gcr.io/sourcegraph-dev/wolfi-symbols-base",
     )

     oci_pull(
         name = "wolfi_server_base",
-        digest = "sha256:0cded0aabc7509d0ff02cc16f76374730e834d8aadd875f47bb4dc381d4105a4",
+        digest = "sha256:b4f5d5927d4e53937fdaf8fc9b54414ee87c2e2f288e566cc25bb98771e88008",
         image = "us.gcr.io/sourcegraph-dev/wolfi-server-base",
     )

     oci_pull(
         name = "wolfi_gitserver_base",
-        digest = "sha256:ac49e90f580bd7f729c78116e384feb25e8c2e1dc7f1ed69395901e68cfd82d1",
+        digest = "sha256:eae7c238c7c33d59752973b6bcb678b25dce1a759a0cece6d8350e4230d4ea49",
         image = "us.gcr.io/sourcegraph-dev/wolfi-gitserver-base",
     )
@@ -54,120 +54,120 @@ def oci_deps():

     oci_pull(
         name = "wolfi_postgres_exporter_base",
-        digest = "sha256:f71d13c7b1a687a61a3954c11005b4d65773d0d857e8622d846ab83a6b29977f",
+        digest = "sha256:b51ae2b70cd7cd7883e8057d69a74c959fd5f03d723538908ea8f47a0a322e02",
         image = "us.gcr.io/sourcegraph-dev/wolfi-postgres-exporter-base",
     )

     oci_pull(
         name = "wolfi_jaeger_all_in_one_base",
-        digest = "sha256:f8e416626dcdc7d14894876e0751f2bfa0653169b14c4a929b3834d30cea087d",
+        digest = "sha256:6d978aa3cc31e3410088ef4a3220fe419878c0b53e604c9323b343d0044ed9d3",
         image = "us.gcr.io/sourcegraph-dev/wolfi-jaeger-all-in-one-base",
     )

     oci_pull(
         name = "wolfi_jaeger_agent_base",
-        digest = "sha256:77da62533456112d87f61b24d6694b2bb7276446e6f94642580cf9649641c4ed",
+        digest = "sha256:a7dd18fa67c5c02f1f6ba6f503a2249a1fe103bfe47775a2faa041b16895c59c",
        image = "us.gcr.io/sourcegraph-dev/wolfi-jaeger-agent-base",
     )

     oci_pull(
         name = "wolfi_redis_base",
-        digest = "sha256:08e80c858fe3ef9b5ffd1c4194a771b6fd45f9831ad40dad3b5f5b53af880582",
+        digest = "sha256:d72b41d737473226ddf3a752bec885caaf1bd93adaecbb33dc0cce693f261b5e",
         image = "us.gcr.io/sourcegraph-dev/wolfi-redis-base",
     )

     oci_pull(
         name = "wolfi_redis_exporter_base",
-        digest = "sha256:003f8d2411cf198de0fe3751cef7b08ed85f2ff05746097bee9cbcae304eca31",
+        digest = "sha256:97924b18f530386f524df14b8172963c54d1378727cea72004bef8ae2490e871",
         image = "us.gcr.io/sourcegraph-dev/wolfi-redis-exporter-base",
     )

     oci_pull(
         name = "wolfi_syntax_highlighter_base",
-        digest = "sha256:9c4531f4f0263ad49678ff81f1094d2eb5f9b812bd93e21da19780b480fe7c52",
+        digest = "sha256:06ce2e349550d2e99c96a5610746fa2a3b743790bd0c16d896847434551afead",
         image = "us.gcr.io/sourcegraph-dev/wolfi-syntax-highlighter-base",
     )

     oci_pull(
         name = "wolfi_search_indexer_base",
-        digest = "sha256:c30c2bc85aa38c9c3796038b290f3eb8fbea6b3e744f91788860de4e58bca822",
+        digest = "sha256:7a3f1327e75de7d3ace2240e650b82a44f4a70bd988548786880c3eebb02143e",
         image = "us.gcr.io/sourcegraph-dev/wolfi-search-indexer-base",
     )

     oci_pull(
         name = "wolfi_repo_updater_base",
-        digest = "sha256:19445a121968c19bcd3bf5ae05cc97802853c039d00efc83f317655def510170",
+        digest = "sha256:2e49220a8e69a8f1f92fe1c2da08efd35a9d7226e76220a5b39c124d8231092b",
         image = "us.gcr.io/sourcegraph-dev/wolfi-repo-updater-base",
     )

     oci_pull(
         name = "wolfi_searcher_base",
-        digest = "sha256:2d90d34644c473bcf5f998c4ce881354992bc28d0644d47e182c2475f0bb616a",
+        digest = "sha256:3029998bad3b614efde5ff2dbe8287b4fa5e38cbf1a22c40b37f97f6257aed16",
         image = "us.gcr.io/sourcegraph-dev/wolfi-searcher-base",
     )

     oci_pull(
         name = "wolfi_executor_base",
-        digest = "sha256:e73c7c629307a2668b5a199bba79d315e7bd8df414e27399e723a8630c06c08c",
+        digest = "sha256:03c0e699760fda087702baa090b0827471395cbf891807b1f73b48280f345041",
         image = "us.gcr.io/sourcegraph-dev/wolfi-executor-base",
     )

     oci_pull(
         name = "wolfi_bundled_executor_base",
-        digest = "sha256:a1229fd8a3511c6931293f3a7b22974741f8def858b54836590a488064cf8240",
+        digest = "sha256:b9a217e4f71e767a19bed1e3d39618ed7258ea726d339776ddf1523267452c8c",
         image = "us.gcr.io/sourcegraph-dev/wolfi-bundled-executor-base",
     )

     oci_pull(
         name = "wolfi_executor_kubernetes_base",
-        digest = "sha256:8e55b6529c84bb0ff24f2d8dc889b74263bcb2584312028ba70d4ce9147d10c1",
+        digest = "sha256:0cb7a64371b29f2689ab18f41a71cab51f0976de1a3b850a2d468601f8ab9c48",
         image = "us.gcr.io/sourcegraph-dev/wolfi-executor-kubernetes-base",
     )

     oci_pull(
         name = "wolfi_batcheshelper_base",
-        digest = "sha256:68dfe2e32c698f457d4f096baf13d7c052b65f80ed3163a15ab30dd2836daa88",
+        digest = "sha256:3c6c8b6ef31d062c4b9faa461d4533bf0589fab7de9b89040b03e27ca25a4176",
         image = "us.gcr.io/sourcegraph-dev/wolfi-batcheshelper-base",
     )

     oci_pull(
         name = "wolfi_prometheus_base",
-        digest = "sha256:fca22248cf10c90af2445cc6627d0300dd46a23be89afd0899618f896909feaa",
+        digest = "sha256:5089836fad63b647d0a1c1dbb3a10d7abdeea2f0fc76f4c977df21d26d70cf06",
         image = "us.gcr.io/sourcegraph-dev/wolfi-prometheus-base",
     )

     oci_pull(
         name = "wolfi_postgresql-12_base",
-        digest = "sha256:cf8a07db3ad8c439e85d4142b3d3f3ef394551a9077b41aea0788f4979c0a9d3",
+        digest = "sha256:e3f597e118056f6c555dbb284b59bf6c29b8ebbd3a4fc6c3df7889db368855a9",
         image = "us.gcr.io/sourcegraph-dev/wolfi-postgresql-12-base",
     )

     oci_pull(
         name = "wolfi_postgresql-12-codeinsights_base",
-        digest = "sha256:6229efd204ae3869fc5f0441da54dd0d4864a972f0ebb0f1e514a18d5fdee0e8",
+        digest = "sha256:78061eee8c728a9d732c1bfd6012baf5f4ad2f087acd18c17a6d749f7a0d459f",
         image = "us.gcr.io/sourcegraph-dev/wolfi-postgresql-12-codeinsights-base",
     )

     oci_pull(
         name = "wolfi_node_exporter_base",
-        digest = "sha256:4bdab308f1c538b3df4d0c2b52a1b47e56ba4fde2c5d8083e847ee86ed8f7320",
+        digest = "sha256:9f7149d05afad6e3581a7a4bc13c60cad5d314bab7307e1dcd47d1c6bb42c497",
         image = "us.gcr.io/sourcegraph-dev/wolfi-node-exporter-base",
     )

     oci_pull(
         name = "wolfi_opentelemetry_collector_base",
-        digest = "sha256:e43edee16894ed6c94fc9505e7387958bad929f33842a1e7c7dba2a0fcf50aa9",
+        digest = "sha256:2b410f0807c8db91ac324edf48b9b657bf7ddabfe7553d0d32d2f5e77db23a7e",
         image = "us.gcr.io/sourcegraph-dev/wolfi-opentelemetry-collector-base",
     )

     oci_pull(
         name = "wolfi_searcher_base",
-        digest = "sha256:2d90d34644c473bcf5f998c4ce881354992bc28d0644d47e182c2475f0bb616a",
+        digest = "sha256:3029998bad3b614efde5ff2dbe8287b4fa5e38cbf1a22c40b37f97f6257aed16",
         image = "us.gcr.io/sourcegraph-dev/wolfi-searcher-base",
     )

     oci_pull(
         name = "wolfi_s3proxy_base",
-        digest = "sha256:75f055320da219bfd83695bcaa011a93c9e101f00b6100b70b04a0e03bd661a3",
+        digest = "sha256:4299634c0e403059a5a2aeda323181feb8189648c23fd69d0b5d057e0e7966eb",
         image = "us.gcr.io/sourcegraph-dev/wolfi-blobstore-base",
     )
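Each `oci_pull` above pins its base image to an immutable digest, so a base-image refresh is exactly this kind of digest swap. A minimal sketch of resolving the digest to pin, using `crane` from go-containerregistry; the `:latest` tag is an assumption for illustration:

```sh
# Print the sha256 digest currently behind the mirror tag, then paste it into
# the matching oci_pull entry above. The tag name is an assumption.
crane digest us.gcr.io/sourcegraph-dev/wolfi-sourcegraph-base:latest
```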
@@ -2,12 +2,6 @@

 ## General

-### I just used `[no-bazel]` to merge my PR
-
-While using `[no-bazel]` will enable you to get your pull request merged, the subsequent builds will be with Bazel unless they also have that flag.
-
-Therefore you need to follow-up quickly with a fix to ensure `main` is not broken.
-
 ### The analysis cache is being busted because of `--action_env`

 Typically you'll see this (in CI or locally):
@@ -206,17 +206,15 @@ Base pipeline (more steps might be included based on branch changes):

 - **Metadata**: Pipeline metadata
 - **Pipeline setup**: Trigger async
-- **Image builds**: Build syntax-highlighter, Build symbols, Build Docker images, Build Docker images, Build Docker images, Build executor image, Build executor binary, Build docker registry mirror image
-- **Image security scans**: Scan executor, Scan alpine-3.14, Scan postgres-12-alpine, Scan cadvisor, Scan codeinsights-db, Scan codeintel-db, Scan frontend, Scan github-proxy, Scan gitserver, Scan grafana, Scan indexed-searcher, Scan migrator, Scan node-exporter, Scan opentelemetry-collector, Scan postgres_exporter, Scan precise-code-intel-worker, Scan prometheus, Scan prometheus-gcp, Scan redis-cache, Scan redis-store, Scan redis_exporter, Scan repo-updater, Scan search-indexer, Scan searcher, Scan syntax-highlighter, Scan worker, Scan symbols, Scan batcheshelper, Scan blobstore2, Scan bundled-executor, Scan dind, Scan embeddings, Scan executor-kubernetes, Scan executor-vm, Scan jaeger-agent, Scan jaeger-all-in-one, Scan cody-gateway, Scan sg, Scan cody-slack
+- **Image builds**: Build Docker images, Build executor image, Build executor binary, Build docker registry mirror image
 - Ensure buildfiles are up to date
 - Tests
 - BackCompat Tests
 - **Linters and static analysis**: Run sg lint
 - **Client checks**: Upload Storybook to Chromatic, Enterprise build, Build (client/jetbrains), Tests for VS Code extension, Unit and integration tests for the Cody VS Code extension, E2E tests for the Cody VS Code extension, Stylelint (all)
-- **Publish candidate images**: Push OCI/Wolfi Candidate
-- **Integration tests**: Backend integration tests (gRPC), Backend integration tests, Code Intel QA
-- **End-to-end tests**: Executors E2E, Sourcegraph E2E
-- **Publish images**: server, executor, alpine-3.14, postgres-12-alpine, blobstore, cadvisor, codeinsights-db, codeintel-db, frontend, github-proxy, gitserver, grafana, indexed-searcher, migrator, node-exporter, opentelemetry-collector, postgres_exporter, precise-code-intel-worker, prometheus, prometheus-gcp, redis-cache, redis-store, redis_exporter, repo-updater, search-indexer, searcher, syntax-highlighter, worker, symbols, batcheshelper, blobstore2, bundled-executor, dind, embeddings, executor-kubernetes, executor-vm, jaeger-agent, jaeger-all-in-one, cody-gateway, sg, cody-slack, Publish executor image, Publish executor binary, Publish docker registry mirror image, Push OCI/Wolfi
+- **Publish candidate images**: Push candidate Images
+- **End-to-end tests**: Executors E2E
+- **Publish images**: executor-vm, alpine-3.14, codeinsights-db, codeintel-db, postgres-12-alpine, Publish executor image, Publish executor binary, Publish docker registry mirror image, Push final images

 ### Release branch
@@ -226,17 +224,15 @@ Base pipeline (more steps might be included based on branch changes):

 - **Metadata**: Pipeline metadata
 - **Pipeline setup**: Trigger async
-- **Image builds**: Build syntax-highlighter, Build symbols, Build Docker images, Build Docker images, Build Docker images, Build executor image, Build executor binary, Build docker registry mirror image
-- **Image security scans**: Scan executor, Scan alpine-3.14, Scan postgres-12-alpine, Scan cadvisor, Scan codeinsights-db, Scan codeintel-db, Scan frontend, Scan github-proxy, Scan gitserver, Scan grafana, Scan indexed-searcher, Scan migrator, Scan node-exporter, Scan opentelemetry-collector, Scan postgres_exporter, Scan precise-code-intel-worker, Scan prometheus, Scan prometheus-gcp, Scan redis-cache, Scan redis-store, Scan redis_exporter, Scan repo-updater, Scan search-indexer, Scan searcher, Scan syntax-highlighter, Scan worker, Scan symbols, Scan batcheshelper, Scan blobstore2, Scan bundled-executor, Scan dind, Scan embeddings, Scan executor-kubernetes, Scan executor-vm, Scan jaeger-agent, Scan jaeger-all-in-one, Scan cody-gateway, Scan sg, Scan cody-slack
+- **Image builds**: Build Docker images, Build executor image, Build executor binary, Build docker registry mirror image
 - Ensure buildfiles are up to date
 - Tests
 - BackCompat Tests
 - **Linters and static analysis**: Run sg lint
 - **Client checks**: Upload Storybook to Chromatic, Enterprise build, Build (client/jetbrains), Tests for VS Code extension, Unit and integration tests for the Cody VS Code extension, E2E tests for the Cody VS Code extension, Stylelint (all)
-- **Publish candidate images**: Push OCI/Wolfi Candidate
-- **Integration tests**: Backend integration tests (gRPC), Backend integration tests, Code Intel QA
-- **End-to-end tests**: Executors E2E, Sourcegraph E2E
-- **Publish images**: server, executor, alpine-3.14, postgres-12-alpine, blobstore, cadvisor, codeinsights-db, codeintel-db, frontend, github-proxy, gitserver, grafana, indexed-searcher, migrator, node-exporter, opentelemetry-collector, postgres_exporter, precise-code-intel-worker, prometheus, prometheus-gcp, redis-cache, redis-store, redis_exporter, repo-updater, search-indexer, searcher, syntax-highlighter, worker, symbols, batcheshelper, blobstore2, bundled-executor, dind, embeddings, executor-kubernetes, executor-vm, jaeger-agent, jaeger-all-in-one, cody-gateway, sg, cody-slack, Push OCI/Wolfi
+- **Publish candidate images**: Push candidate Images
+- **End-to-end tests**: Executors E2E
+- **Publish images**: executor-vm, alpine-3.14, codeinsights-db, codeintel-db, postgres-12-alpine, Push final images

 ### Browser extension release build
@@ -288,17 +284,15 @@ Base pipeline (more steps might be included based on branch changes):

 - **Metadata**: Pipeline metadata
 - **Pipeline setup**: Trigger async
-- **Image builds**: Build syntax-highlighter, Build symbols, Build Docker images, Build Docker images, Build Docker images, Build executor image, Build executor binary
-- **Image security scans**: Scan executor, Scan alpine-3.14, Scan postgres-12-alpine, Scan cadvisor, Scan codeinsights-db, Scan codeintel-db, Scan frontend, Scan github-proxy, Scan gitserver, Scan grafana, Scan indexed-searcher, Scan migrator, Scan node-exporter, Scan opentelemetry-collector, Scan postgres_exporter, Scan precise-code-intel-worker, Scan prometheus, Scan prometheus-gcp, Scan redis-cache, Scan redis-store, Scan redis_exporter, Scan repo-updater, Scan search-indexer, Scan searcher, Scan syntax-highlighter, Scan worker, Scan symbols, Scan batcheshelper, Scan blobstore2, Scan bundled-executor, Scan dind, Scan embeddings, Scan executor-kubernetes, Scan executor-vm, Scan jaeger-agent, Scan jaeger-all-in-one, Scan cody-gateway, Scan sg, Scan cody-slack
+- **Image builds**: Build Docker images, Build executor image, Build executor binary
 - Ensure buildfiles are up to date
 - Tests
 - BackCompat Tests
 - **Linters and static analysis**: Run sg lint
 - **Client checks**: Upload Storybook to Chromatic, Enterprise build, Build (client/jetbrains), Tests for VS Code extension, Unit and integration tests for the Cody VS Code extension, E2E tests for the Cody VS Code extension, Stylelint (all)
-- **Publish candidate images**: Push OCI/Wolfi Candidate
-- **Integration tests**: Backend integration tests (gRPC), Backend integration tests, Code Intel QA
-- **End-to-end tests**: Executors E2E, Sourcegraph E2E
-- **Publish images**: server, executor, alpine-3.14, postgres-12-alpine, blobstore, cadvisor, codeinsights-db, codeintel-db, frontend, github-proxy, gitserver, grafana, indexed-searcher, migrator, node-exporter, opentelemetry-collector, postgres_exporter, precise-code-intel-worker, prometheus, prometheus-gcp, redis-cache, redis-store, redis_exporter, repo-updater, search-indexer, searcher, syntax-highlighter, worker, symbols, batcheshelper, blobstore2, bundled-executor, dind, embeddings, executor-kubernetes, executor-vm, jaeger-agent, jaeger-all-in-one, cody-gateway, sg, cody-slack, Publish executor image, Publish executor binary, Push OCI/Wolfi
+- **Publish candidate images**: Push candidate Images
+- **End-to-end tests**: Executors E2E
+- **Publish images**: executor-vm, alpine-3.14, codeinsights-db, codeintel-db, postgres-12-alpine, Publish executor image, Publish executor binary, Push final images

 ### Main dry run
@@ -313,17 +307,15 @@ Base pipeline (more steps might be included based on branch changes):

 - **Metadata**: Pipeline metadata
 - **Pipeline setup**: Trigger async
-- **Image builds**: Build syntax-highlighter, Build symbols, Build Docker images, Build Docker images, Build Docker images, Build executor image, Build executor binary
-- **Image security scans**: Scan executor, Scan alpine-3.14, Scan postgres-12-alpine, Scan cadvisor, Scan codeinsights-db, Scan codeintel-db, Scan frontend, Scan github-proxy, Scan gitserver, Scan grafana, Scan indexed-searcher, Scan migrator, Scan node-exporter, Scan opentelemetry-collector, Scan postgres_exporter, Scan precise-code-intel-worker, Scan prometheus, Scan prometheus-gcp, Scan redis-cache, Scan redis-store, Scan redis_exporter, Scan repo-updater, Scan search-indexer, Scan searcher, Scan syntax-highlighter, Scan worker, Scan symbols, Scan batcheshelper, Scan blobstore2, Scan bundled-executor, Scan dind, Scan embeddings, Scan executor-kubernetes, Scan executor-vm, Scan jaeger-agent, Scan jaeger-all-in-one, Scan cody-gateway, Scan sg, Scan cody-slack
+- **Image builds**: Build Docker images, Build executor image, Build executor binary
 - Ensure buildfiles are up to date
 - Tests
 - BackCompat Tests
 - **Linters and static analysis**: Run sg lint
 - **Client checks**: Upload Storybook to Chromatic, Enterprise build, Build (client/jetbrains), Tests for VS Code extension, Unit and integration tests for the Cody VS Code extension, E2E tests for the Cody VS Code extension, Stylelint (all)
-- **Publish candidate images**: Push OCI/Wolfi Candidate
-- **Integration tests**: Backend integration tests (gRPC), Backend integration tests, Code Intel QA
-- **End-to-end tests**: Executors E2E, Sourcegraph E2E
-- **Publish images**: server, executor, alpine-3.14, postgres-12-alpine, blobstore, cadvisor, codeinsights-db, codeintel-db, frontend, github-proxy, gitserver, grafana, indexed-searcher, migrator, node-exporter, opentelemetry-collector, postgres_exporter, precise-code-intel-worker, prometheus, prometheus-gcp, redis-cache, redis-store, redis_exporter, repo-updater, search-indexer, searcher, syntax-highlighter, worker, symbols, batcheshelper, blobstore2, bundled-executor, dind, embeddings, executor-kubernetes, executor-vm, jaeger-agent, jaeger-all-in-one, cody-gateway, sg, cody-slack, Push OCI/Wolfi
+- **Publish candidate images**: Push candidate Images
+- **End-to-end tests**: Executors E2E
+- **Publish images**: executor-vm, alpine-3.14, codeinsights-db, codeintel-db, postgres-12-alpine, Push final images

 ### Patch image
@@ -343,21 +335,6 @@ You can create a build of this run type for your changes using:
 sg ci build docker-images-patch-notest
 ```

-### Build all candidates without testing
-
-The run type for branches matching `docker-images-candidates-notest/`.
-You can create a build of this run type for your changes using:
-
-```sh
-sg ci build docker-images-candidates-notest
-```
-
-Base pipeline (more steps might be included based on branch changes):
-
-- **Metadata**: Pipeline metadata
-- **Image builds**: Build syntax-highlighter, Build symbols, Build Docker images, Build Docker images, Build Docker images
-- **Publish images**: Publish images
-
 ### Build executor without testing

 The run type for branches matching `executor-patch-notest/`.
@@ -391,6 +368,13 @@ sg ci build backend-integration
 Base pipeline (more steps might be included based on branch changes):

 - **Metadata**: Pipeline metadata
-- Build server
-- Backend integration tests (gRPC)
-- Backend integration tests
+- **Pipeline setup**: Trigger async
+- **Image builds**: Build Docker images
+- Ensure buildfiles are up to date
+- Tests
+- BackCompat Tests
+- **Linters and static analysis**: Run sg lint
+- **Client checks**: Upload Storybook to Chromatic, Enterprise build, Build (client/jetbrains), Tests for VS Code extension, Unit and integration tests for the Cody VS Code extension, E2E tests for the Cody VS Code extension, Stylelint (all)
+- **Publish candidate images**: Push candidate Images
+- **End-to-end tests**: Executors E2E
+- **Publish images**: executor-vm, alpine-3.14, codeinsights-db, codeintel-db, postgres-12-alpine, Push final images
@@ -264,7 +264,6 @@ Supported run types when providing an argument for 'sg ci build [runtype]':
 * main-dry-run - Main dry run
 * docker-images-patch - Patch image
 * docker-images-patch-notest - Patch image without testing
-* docker-images-candidates-notest - Build all candidates without testing
 * executor-patch-notest - Build executor without testing
 * backend-integration - Backend integration tests
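Any of the remaining run types is triggered the same way; for example, taking `main-dry-run` from the list above:

```sh
sg ci build main-dry-run
```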
docker-images/codeinsights-db/BUILD.bazel (generated, 11 changed lines)

@@ -56,8 +56,9 @@ container_structure_test(
     ],
 )

-oci_push(
-    name = "candidate_push",
-    image = ":image",
-    repository = image_repository("codeinsights-db"),
-)
+# RFC 793: We are not publishing those images for the 5.1 release, but will the next minor version.
+# oci_push(
+#     name = "candidate_push",
+#     image = ":image",
+#     repository = image_repository("codeinsights-db"),
+# )
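For context, `oci_push` targets from rules_oci are plain runnable targets, so once the block above is uncommented for the next minor version, publishing the candidate should be a single `bazel run`; a hedged sketch:

```sh
# Only meaningful after the oci_push target is re-enabled per the RFC 793 note.
bazel run //docker-images/codeinsights-db:candidate_push
```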
docker-images/codeinsights-db/README.md (new file, 21 lines)

@@ -0,0 +1,21 @@
+# codeinsights-db
+
+This image provides a PostgreSQL server for the _codeinsights_ database for Sourcegraph.
+
+## Building and testing
+
+This image comes in two flavours: the old Alpine image, and the hardened one, built with Wolfi and Bazel.
+
+### Alpine
+
+As per [RFC 793](), Sourcegraph 5.1.0 is the last release in which we ship this variant to customers.
+
+- Build: `./docker-images/codeinsights-db/build.sh`
+- Test: N/A
+
+### Hardened
+
+Please note that migrating from the Alpine image requires a manual step to reindex the database. See the 5.1 upgrade documentation for details.
+
+- Build: `bazel build //docker-images/codeinsights-db:image_tarball`
+- Test: `bazel build //docker-images/codeinsights-db:image_test`
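The hardened targets are ordinary Bazel labels; a sketch of trying the image locally, assuming `:image_tarball` is a rules_oci `oci_tarball` target (in which case running it builds the tarball and loads it into the local Docker daemon):

```sh
bazel run //docker-images/codeinsights-db:image_tarball
docker image ls | grep codeinsights-db  # the loaded image name may differ
```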
docker-images/codeintel-db/BUILD.bazel (generated, 11 changed lines)

@@ -25,8 +25,9 @@ container_structure_test(
     ],
 )

-oci_push(
-    name = "candidate_push",
-    image = ":image",
-    repository = image_repository("codeintel-db"),
-)
+# RFC 793: We are not publishing those images for the 5.1 release, but will the next minor version.
+# oci_push(
+#     name = "candidate_push",
+#     image = ":image",
+#     repository = image_repository("codeintel-db"),
+# )
docker-images/codeintel-db/README.md (new file, 21 lines)

@@ -0,0 +1,21 @@
+# codeintel-db
+
+This image provides a PostgreSQL server for the _codeintel_ database for Sourcegraph.
+
+## Building and testing
+
+This image comes in two flavours: the old Alpine image, and the hardened one, built with Wolfi and Bazel.
+
+### Alpine
+
+As per [RFC 793](), Sourcegraph 5.1.0 is the last release in which we ship this variant to customers.
+
+- Build: `./docker-images/codeintel-db/build.sh`
+- Test: N/A
+
+### Hardened
+
+Please note that migrating from the Alpine image requires a manual step to reindex the database. See the 5.1 upgrade documentation for details.
+
+- Build: `bazel build //docker-images/codeintel-db:image_tarball`
+- Test: `bazel build //docker-images/codeintel-db:image_test`
@@ -31,7 +31,7 @@ fileExistenceTests:
     shouldExist: true
     uid: 10001
     gid: 0
-    permissions: 'drwxrwxrwx'
+    permissions: 'drwxr-xr-x'

 metadataTest:
   envVars:
docker-images/postgres-12-alpine/BUILD.bazel (generated, 13 changed lines)

@@ -3,8 +3,6 @@ load("@rules_pkg//:pkg.bzl", "pkg_tar")
 load("@container_structure_test//:defs.bzl", "container_structure_test")
 load("//dev:oci_defs.bzl", "image_repository")

-# TODO move this to a different folder
-
 filegroup(
     name = "config",
     srcs = glob(
@@ -60,8 +58,9 @@ container_structure_test(
     ],
 )

-oci_push(
-    name = "candidate_push",
-    image = ":image",
-    repository = image_repository("postgres-12-alpine"), # TODO careful, this is not an alpine
-)
+# RFC 793: We are not publishing those images for the 5.1 release, but will the next minor version.
+# oci_push(
+#     name = "candidate_push",
+#     image = ":image",
+#     repository = image_repository("postgres-12-alpine"), # TODO careful, this is not an alpine
+# )
docker-images/postgres-12-alpine/README.md (new file, 21 lines)

@@ -0,0 +1,21 @@
+# Postgres-12 Alpine
+
+This image provides a PostgreSQL server for the _main_ database for Sourcegraph.
+
+## Building and testing
+
+This image comes in two flavours: the old Alpine image, and the hardened one, built with Wolfi and Bazel.
+
+### Alpine
+
+As per [RFC 793](), Sourcegraph 5.1.0 is the last release in which we ship this variant to customers.
+
+- Build: `./docker-images/postgres-12-alpine/build.sh`
+- Test: N/A
+
+### Hardened
+
+Please note that migrating from the Alpine image requires a manual step to reindex the database. See the 5.1 upgrade documentation for details.
+
+- Build: `bazel build //docker-images/postgres-12-alpine:image_tarball`
+- Test: `bazel build //docker-images/postgres-12-alpine:image_test`
enterprise/cmd/batcheshelper/BUILD.bazel (generated, 6 changed lines)

@@ -42,16 +42,12 @@ go_test(
 pkg_tar(
     name = "tar_batcheshelper",
     srcs = [":batcheshelper"],
     package_dir = "/usr/local/bin",
 )

 oci_image(
     name = "image",
     base = "@wolfi_batcheshelper_base",
     entrypoint = [
         "/sbin/tini",
         "--",
         "/batcheshelper",
     ],
     tars = [":tar_batcheshelper"],
 )
@@ -2,7 +2,7 @@ schemaVersion: "2.0.0"

 commandTests:
   - name: "binary is runnable"
-    command: "/batcheshelper"
+    command: "batcheshelper"
     envVars:
       - key: "SANITY_CHECK"
         value: "true"
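The test leans on the binary's sanity-check mode: with `SANITY_CHECK=true` the binary is expected to self-check and exit cleanly instead of running normally. A hand-run equivalent of the structure test; the image reference is a placeholder:

```sh
# Placeholder image reference; substitute a locally loaded candidate image.
docker run --rm -e SANITY_CHECK=true batcheshelper:candidate batcheshelper
```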
enterprise/cmd/executor/BUILD.bazel (generated, 7 changed lines)

@@ -34,6 +34,12 @@ pkg_tar(
     srcs = [":executor"],
 )

+pkg_tar(
+    name = "tar_batcheshelper",
+    srcs = ["//enterprise/cmd/batcheshelper"],
+    package_dir = "/usr/local/bin",
+)
+
 pkg_tar(
     name = "tar_src-cli",
     srcs = ["@src-cli-linux-amd64//:src-cli-linux-amd64"],
@@ -54,6 +60,7 @@ oci_image(
     tars = [
         ":tar_executor",
         ":tar_src-cli",
+        ":tar_batcheshelper",
     ],
     user = "sourcegraph",
 )
@@ -17,6 +17,11 @@ commandTests:
     command: "/usr/local/bin/src"
     args:
       - -v
+  - name: "batcheshelper binary is runnable"
+    command: "/usr/local/bin/batcheshelper"
+    envVars:
+      - key: "SANITY_CHECK"
+        value: "true"

   - name: "not running as root"
     command: "/usr/bin/id"
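These `container_structure_test` files are wired up as regular Bazel test targets, so they run under `bazel test`; the target label below is an assumption for illustration:

```sh
# Assumed label; adjust to the actual container_structure_test target name.
bazel test //enterprise/cmd/executor:image_test
```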
@@ -38,13 +38,21 @@ func bazelCmd(args ...string) string {
 	return strings.Join(Cmd, " ")
 }

+func bazelPushImagesCandidates(version string) func(*bk.Pipeline) {
+	return bazelPushImagesCmd(version, true)
+}
+
+func bazelPushImagesFinal(version string) func(*bk.Pipeline) {
+	return bazelPushImagesCmd(version, false)
+}
+
 func bazelPushImagesCmd(version string, isCandidate bool) func(*bk.Pipeline) {
-	stepName := ":bazel::docker: Push OCI/Wolfi"
+	stepName := ":bazel::docker: Push final images"
 	stepKey := "bazel-push-images"
 	candidate := ""

 	if isCandidate {
-		stepName = ":bazel::docker: Push OCI/Wolfi Candidate"
+		stepName = ":bazel::docker: Push candidate Images"
 		stepKey = stepKey + "-candidate"
 		candidate = "true"
 	}
@@ -438,7 +446,7 @@ func bazelPublishFinalDockerImage(c Config, apps []string) operations.Operation {

 	var imgs []string
 	for _, image := range []string{publishImage, devImage} {
-		if app != "server" || c.RunType.Is(runtype.TaggedRelease, runtype.ImagePatch, runtype.ImagePatchNoTest, runtype.CandidatesNoTest) {
+		if app != "server" || c.RunType.Is(runtype.TaggedRelease, runtype.ImagePatch, runtype.ImagePatchNoTest) {
 			imgs = append(imgs, fmt.Sprintf("%s:%s", image, c.Version))
 		}
@@ -240,7 +240,6 @@ func parseMessageFlags(msg string) MessageFlags {
 		ProfilingEnabled: strings.Contains(msg, "[buildkite-enable-profiling]"),
 		SkipHashCompare: strings.Contains(msg, "[skip-hash-compare]"),
 		ForceReadyForReview: strings.Contains(msg, "[review-ready]"),
-		NoBazel: strings.Contains(msg, "[no-bazel]"),
 	}
 }
@@ -28,8 +28,6 @@ type CoreTestOperationsOptions struct {
 	// for addWebAppOSSBuild
 	CacheBundleSize bool
 	CreateBundleSizeDiff bool
-	// ForceBazel replaces vanilla jobs with Bazel ones if enabled.
-	ForceBazel bool
 }

 // CoreTestOperations is a core set of tests that should be run in most CI cases. More
@@ -45,10 +43,7 @@ type CoreTestOperationsOptions struct {
 func CoreTestOperations(diff changed.Diff, opts CoreTestOperationsOptions) *operations.Set {
 	// Base set
 	ops := operations.NewSet()

-	if opts.ForceBazel {
-		ops.Append(BazelOperations()...)
-	}
+	ops.Append(BazelOperations()...)

 	// Simple, fast-ish linter checks
 	linterOps := operations.NewNamedSet("Linters and static analysis")
@@ -58,60 +53,19 @@ func CoreTestOperations(diff changed.Diff, opts CoreTestOperationsOptions) *operations.Set {
 	ops.Merge(linterOps)

 	if diff.Has(changed.Client | changed.GraphQL) {
-		var clientChecks *operations.Set
-		// TODO(Bazel) clean this once we go GA.
-		if opts.ForceBazel {
-			// If there are any Graphql changes, they are impacting the client as well.
-			clientChecks = operations.NewNamedSet("Client checks",
-				// clientIntegrationTests is now covered by Bazel
-				// clientIntegrationTests,
-				clientChromaticTests(opts),
-				// frontendTests is now covered by Bazel
-				// frontendTests, // ~4.5m
-				// addWebAppOSSBuild is now covered by Bazel
-				// addWebAppOSSBuild(opts),
-				addWebAppEnterpriseBuild(opts),
-				// addWebAppTests is now covered by Bazel
-				// addWebAppTests(opts),
-				// addBrowserExtensionsUnitTests is now covered by Bazel
-				// addBrowserExtensionUnitTests, // ~4.5m
-				addJetBrainsUnitTests, // ~2.5m
-				// addTypescriptCheck is now covered by Bazel
-				addVsceTests, // ~3.0m
-				addCodyUnitIntegrationTests,
-				addCodyE2ETests,
-				// addESLint,
-				addStylelint,
-			)
-		} else {
-			// If there are any Graphql changes, they are impacting the client as well.
-			clientChecks = operations.NewNamedSet("Client checks",
-				clientIntegrationTests,
-				clientChromaticTests(opts),
-				frontendTests, // ~4.5m
-				addWebAppOSSBuild(opts),
-				addWebAppTests(opts),
-				addBrowserExtensionUnitTests, // ~4.5m
-				addJetBrainsUnitTests, // ~2.5m
-				addTypescriptCheck, // ~4m
-				addVsceTests, // ~3.0m
-				addCodyUnitIntegrationTests,
-				addCodyE2ETests,
-				addESLint,
-				addStylelint,
-			)
-		}
+		// If there are any Graphql changes, they are impacting the client as well.
+		clientChecks := operations.NewNamedSet("Client checks",
+			clientChromaticTests(opts),
+			addWebAppEnterpriseBuild(opts),
+			addJetBrainsUnitTests, // ~2.5m
+			addVsceTests, // ~3.0m
+			addCodyUnitIntegrationTests,
+			addCodyE2ETests,
+			addStylelint,
+		)
 		ops.Merge(clientChecks)
 	}

-	if diff.Has(changed.Go|changed.GraphQL) && !opts.ForceBazel {
-		// If there are any Graphql changes, they are impacting the backend as well.
-		ops.Merge(operations.NewNamedSet("Go checks",
-			addGoTests,
-			addGoBuild))
-	}
-
 	return ops
 }
@@ -510,29 +464,6 @@ func addGoBuild(pipeline *bk.Pipeline) {
 	)
 }

-// Adds backend integration tests step.
-func backendIntegrationTests(candidateImageTag string, imageDep string) operations.Operation {
-	return func(pipeline *bk.Pipeline) {
-		for _, enableGRPC := range []bool{true, false} {
-			description := ":bazel::docker: :chains: Backend integration tests"
-			if enableGRPC {
-				description += " (gRPC)"
-			}
-			pipeline.AddStep(
-				description,
-				// Run tests against the candidate server image
-				bk.DependsOn(candidateImageStepKey(imageDep)),
-				bk.AutomaticRetry(1), // TODO: @jhchabran, flaky, investigate
-				bk.Env("IMAGE",
-					images.DevRegistryImage("server", candidateImageTag)),
-				bk.Env("SG_FEATURE_FLAG_GRPC", strconv.FormatBool(enableGRPC)),
-				bk.Cmd("dev/ci/integration/backend/run.sh"),
-				bk.ArtifactPaths("./*.log"),
-				bk.Agent("queue", "bazel"))
-		}
-	}
-}

 func addBrowserExtensionE2ESteps(pipeline *bk.Pipeline) {
 	for _, browser := range []string{"chrome"} {
 		// Run e2e tests
@@ -699,7 +630,7 @@ func executorsE2E(candidateTag string) operations.Operation {
 	return func(p *bk.Pipeline) {
 		p.AddStep(":bazel::docker::packer: Executors E2E",
 			// Run tests against the candidate server image
-			bk.DependsOn(candidateImageStepKey("server")),
+			bk.DependsOn("bazel-push-images-candidate"),
 			bk.Agent("queue", "bazel"),
 			bk.Env("CANDIDATE_VERSION", candidateTag),
 			bk.Env("SOURCEGRAPH_BASE_URL", "http://127.0.0.1:7080"),
@@ -716,25 +647,6 @@
 	}
 }

-func serverE2E(candidateTag string) operations.Operation {
-	return func(p *bk.Pipeline) {
-		p.AddStep(":chromium: Sourcegraph E2E",
-			// Run tests against the candidate server image
-			bk.DependsOn(candidateImageStepKey("server")),
-			bk.Env("CANDIDATE_VERSION", candidateTag),
-			bk.Env("DISPLAY", ":99"),
-			bk.Env("SOURCEGRAPH_BASE_URL", "http://127.0.0.1:7080"),
-			bk.Env("SOURCEGRAPH_SUDO_USER", "admin"),
-			bk.Env("TEST_USER_EMAIL", "test@sourcegraph.com"),
-			bk.Env("TEST_USER_PASSWORD", "supersecurepassword"),
-			bk.Env("INCLUDE_ADMIN_ONBOARDING", "false"),
-			bk.AnnotatedCmd("dev/ci/integration/e2e/run.sh", bk.AnnotatedCmdOpts{
-				Annotations: &bk.AnnotationOpts{},
-			}),
-			bk.ArtifactPaths("./*.png", "./*.mp4", "./*.log"))
-	}
-}

 func serverQA(candidateTag string) operations.Operation {
 	return func(p *bk.Pipeline) {
 		p.AddStep(":docker::chromium: Sourcegraph QA",
@@ -955,7 +867,7 @@ func publishFinalDockerImage(c Config, app string) operations.Operation {

 	var imgs []string
 	for _, image := range []string{publishImage, devImage} {
-		if app != "server" || c.RunType.Is(runtype.TaggedRelease, runtype.ImagePatch, runtype.ImagePatchNoTest, runtype.CandidatesNoTest) {
+		if app != "server" || c.RunType.Is(runtype.TaggedRelease, runtype.ImagePatch, runtype.ImagePatchNoTest) {
 			imgs = append(imgs, fmt.Sprintf("%s:%s", image, c.Version))
 		}
@@ -16,6 +16,16 @@ import (
 	"github.com/sourcegraph/sourcegraph/enterprise/dev/ci/internal/ci/operations"
 )

+var legacyDockerImages = []string{
+	"executor-vm",
+
+	// See RFC 793, those images will be dropped in 5.1.x.
+	"alpine-3.14",
+	"codeinsights-db",
+	"codeintel-db",
+	"postgres-12-alpine",
+}
+
 // GeneratePipeline is the main pipeline generation function. It defines the build pipeline for each of the
 // main CI cases, which are defined in the main switch statement in the function.
 func GeneratePipeline(c Config) (*bk.Pipeline, error) {
@@ -56,14 +66,6 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 	}
 	bk.FeatureFlags.ApplyEnv(env)

-	// If we detect the author to be a folk from Aspect.dev, force the Bazel flag.
-	// This is to avoid incorrectly assuming that the CI will run Bazel task and
-	// missing regressions being introduced in a PR.
-	authorEmail := os.Getenv("BUILDKITE_BUILD_AUTHOR_EMAIL")
-	if strings.HasSuffix(authorEmail, "@aspect.dev") {
-		c.MessageFlags.NoBazel = false
-	}
-
 	// On release branches Percy must compare to the previous commit of the release branch, not main.
 	if c.RunType.Is(runtype.ReleaseBranch, runtype.TaggedRelease) {
 		env["PERCY_TARGET_BRANCH"] = c.Branch
@@ -174,7 +176,6 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 			MinimumUpgradeableVersion: minimumUpgradeableVersion,
 			ForceReadyForReview: c.MessageFlags.ForceReadyForReview,
 			CreateBundleSizeDiff: true,
-			ForceBazel: !c.MessageFlags.NoBazel,
 		}))

 	// Now we set up conditional operations that only apply to pull requests.
@@ -188,12 +189,6 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 	case runtype.ReleaseNightly:
 		ops.Append(triggerReleaseBranchHealthchecks(minimumUpgradeableVersion))

-	case runtype.BackendIntegrationTests:
-		ops.Append(
-			bazelBuildCandidateDockerImage("server", c.Version, c.candidateImageTag(), c.RunType),
-			backendIntegrationTests(c.candidateImageTag(), "server"),
-		)
-
 	case runtype.BextReleaseBranch:
 		// If this is a browser extension release branch, run the browser-extension tests and
 		// builds.
@@ -291,29 +286,6 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 			bazelBuildCandidateDockerImage(patchImage, c.Version, c.candidateImageTag(), c.RunType),
 			wait,
 			publishFinalDockerImage(c, patchImage))

-	case runtype.CandidatesNoTest:
-		imageBuildOps := operations.NewNamedSet("Image builds")
-		imageBuildOps.Append(buildCandidateDockerImage("syntax-highlighter", c.Version, c.candidateImageTag(), false))
-		imageBuildOps.Append(buildCandidateDockerImage("symbols", c.Version, c.candidateImageTag(), false))
-		imageBuildOps.Append(bazelBuildCandidateDockerImages(images.SourcegraphDockerImagesTestDeps, c.Version, c.candidateImageTag(), c.RunType))
-		var deployImages = []string{}
-		for _, image := range images.DeploySourcegraphDockerImages {
-			if image == "syntax-highlighter" || image == "symbols" {
-				continue
-			}
-			deployImages = append(deployImages, image)
-		}
-		imageBuildOps.Append(bazelBuildCandidateDockerImages(deployImages, c.Version, c.candidateImageTag(), c.RunType))
-		imageBuildOps.Append(bazelBuildCandidateDockerImages(images.SourcegraphDockerImagesMisc, c.Version, c.candidateImageTag(), c.RunType))
-		ops.Merge(imageBuildOps)
-		ops.Append(wait)
-
-		publishOps := operations.NewNamedSet("Publish images")
-		publishOps.Append(bazelPublishFinalDockerImage(c, images.SourcegraphDockerImages))
-
-		ops.Merge(publishOps)
-
 	case runtype.ExecutorPatchNoTest:
 		executorVMImage := "executor-vm"
 		ops = operations.NewSet(
@@ -338,39 +310,8 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 		skipHashCompare := c.MessageFlags.SkipHashCompare || c.RunType.Is(runtype.ReleaseBranch, runtype.TaggedRelease) || c.Diff.Has(changed.ExecutorVMImage)
 		// Slow image builds
 		imageBuildOps := operations.NewNamedSet("Image builds")
-		if c.MessageFlags.NoBazel {
-			for _, dockerImage := range images.SourcegraphDockerImages {
-				// Only upload sourcemaps for the "frontend" image, on the Main branch build
-				uploadSourcemaps := false
-				if c.RunType.Is(runtype.MainBranch) && dockerImage == "frontend" {
-					uploadSourcemaps = true
-				}
-				imageBuildOps.Append(buildCandidateDockerImage(dockerImage, c.Version, c.candidateImageTag(), uploadSourcemaps))
-			}
-			// Executor VM image
-			// skipHashCompare := c.MessageFlags.SkipHashCompare || c.RunType.Is(runtype.ReleaseBranch, runtype.TaggedRelease) || c.Diff.Has(changed.ExecutorVMImage)
-			if c.RunType.Is(runtype.MainDryRun, runtype.MainBranch, runtype.ReleaseBranch, runtype.TaggedRelease) {
-				imageBuildOps.Append(buildExecutorVM(c, skipHashCompare))
-				imageBuildOps.Append(buildExecutorBinary(c))
-				if c.RunType.Is(runtype.ReleaseBranch, runtype.TaggedRelease) || c.Diff.Has(changed.ExecutorDockerRegistryMirror) {
-					imageBuildOps.Append(buildExecutorDockerMirror(c))
-				}
-			}
-		} else {
-			imageBuildOps.Append(buildCandidateDockerImage("syntax-highlighter", c.Version, c.candidateImageTag(), false))
-			imageBuildOps.Append(buildCandidateDockerImage("symbols", c.Version, c.candidateImageTag(), false))
-			imageBuildOps.Append(bazelBuildCandidateDockerImages(images.SourcegraphDockerImagesTestDeps, c.Version, c.candidateImageTag(), c.RunType))
-			var deployImages = []string{}
-			for _, image := range images.DeploySourcegraphDockerImages {
-				if image == "syntax-highlighter" || image == "symbols" {
-					continue
-				}
-				deployImages = append(deployImages, image)
-			}
-			imageBuildOps.Append(bazelBuildCandidateDockerImages(deployImages, c.Version, c.candidateImageTag(), c.RunType))
-			imageBuildOps.Append(bazelBuildCandidateDockerImages(images.SourcegraphDockerImagesMisc, c.Version, c.candidateImageTag(), c.RunType))
+		imageBuildOps.Append(bazelBuildCandidateDockerImages(legacyDockerImages, c.Version, c.candidateImageTag(), c.RunType))
-
-		}
 		if c.RunType.Is(runtype.MainDryRun, runtype.MainBranch, runtype.ReleaseBranch, runtype.TaggedRelease) {
 			imageBuildOps.Append(buildExecutorVM(c, skipHashCompare))
 			imageBuildOps.Append(buildExecutorBinary(c))
@@ -380,38 +321,22 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 		}
 		ops.Merge(imageBuildOps)

-		// Trivy security scans
-		imageScanOps := operations.NewNamedSet("Image security scans")
-		for _, dockerImage := range images.SourcegraphDockerImages {
-			imageScanOps.Append(trivyScanCandidateImage(dockerImage, c.candidateImageTag()))
-		}
-		ops.Merge(imageScanOps)
-
 		// Core tests
 		ops.Merge(CoreTestOperations(changed.All, CoreTestOperationsOptions{
 			ChromaticShouldAutoAccept: c.RunType.Is(runtype.MainBranch, runtype.ReleaseBranch, runtype.TaggedRelease),
 			MinimumUpgradeableVersion: minimumUpgradeableVersion,
 			ForceReadyForReview: c.MessageFlags.ForceReadyForReview,
 			CacheBundleSize: c.RunType.Is(runtype.MainBranch, runtype.MainDryRun),
-			ForceBazel: !c.MessageFlags.NoBazel,
 		}))

 		// Publish candidate images to dev registry
 		publishOpsDev := operations.NewNamedSet("Publish candidate images")
-		publishOpsDev.Append(bazelPushImagesCmd(c.Version, true))
+		publishOpsDev.Append(bazelPushImagesCandidates(c.Version))
 		ops.Merge(publishOpsDev)

-		// Integration tests
-		// Temporary: on main branches, we build images with bazel binaries based on their toolchain and/or purpose. This step key is the first image in the array.
-		// This will be removed once we build images with wolfi.
-		ops.Merge(operations.NewNamedSet("Integration tests",
-			backendIntegrationTests(c.candidateImageTag(), "server"),
-			codeIntelQA(c.candidateImageTag()),
-		))
 		// End-to-end tests
 		ops.Merge(operations.NewNamedSet("End-to-end tests",
 			executorsE2E(c.candidateImageTag()),
-			serverE2E(c.candidateImageTag()),
 			// testUpgrade(c.candidateImageTag(), minimumUpgradeableVersion),
 		))
@@ -421,7 +346,7 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 		// Add final artifacts
 		publishOps := operations.NewNamedSet("Publish images")
 		// Add final artifacts
-		for _, dockerImage := range images.SourcegraphDockerImages {
+		for _, dockerImage := range legacyDockerImages {
 			publishOps.Append(publishFinalDockerImage(c, dockerImage))
 		}
 		// Executor VM image
@@ -433,7 +358,7 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
 			}
 		}
 		// Final Bazel images
-		publishOps.Append(bazelPushImagesCmd(c.Version, false))
+		publishOps.Append(bazelPushImagesFinal(c.Version))
 		ops.Merge(publishOps)
 	}
@@ -8,7 +8,7 @@ function preview_tags() {

 	for tag in "${tags[@]}"; do
 		for registry in "${registries[@]}"; do
-			echo -e "\t ${registry}/\$IMAGE:${qa_prefix}-${tag}"
+			echo -e "\t ${registry}/\$IMAGE:${tag}"
 		done
 	done
 }
@@ -45,8 +45,6 @@ prod_registries=(

 date_fragment="$(date +%Y-%m-%d)"

-qa_prefix="bazel"
-
 dev_tags=(
 	"${BUILDKITE_COMMIT:0:12}"
 	"${BUILDKITE_COMMIT:0:12}_${date_fragment}"
@@ -87,12 +85,12 @@ echo "--- done"

 dev_tags_args=""
 for t in "${dev_tags[@]}"; do
-	dev_tags_args="$dev_tags_args --tag ${qa_prefix}-${t}"
+	dev_tags_args="$dev_tags_args --tag ${t}"
 done
 prod_tags_args=""
 if $push_prod; then
 	for t in "${prod_tags[@]}"; do
-		prod_tags_args="$prod_tags_args --tag ${qa_prefix}-${t}"
+		prod_tags_args="$prod_tags_args --tag ${t}"
 	done
 fi
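The net effect of dropping `qa_prefix` is easiest to see on a concrete tag; a small sketch with a made-up commit SHA:

```sh
BUILDKITE_COMMIT="abc77ba6a5d4c3b2a190"  # made-up value for illustration
date_fragment="$(date +%Y-%m-%d)"
# Previously pushed as e.g. "bazel-abc77ba6a5d4"; now the prefix is gone:
echo "--tag ${BUILDKITE_COMMIT:0:12} --tag ${BUILDKITE_COMMIT:0:12}_${date_fragment}"
```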