bzl: rework how we populate the database schemas for migrator (#57591)
Co-authored-by: William Bezuidenhout <william.bezuidenhout@sourcegraph.com>
parent: dc5fc43d22
commit: e8919ada26
@@ -415,3 +415,7 @@ container_structure_test_register_toolchain(name = "cst")
load("//dev:tool_deps.bzl", "tool_deps")

tool_deps()

load("//tools/release:schema_deps.bzl", "schema_deps")

schema_deps()
@@ -29,317 +29,21 @@ go_binary(
    },
)

# See tools/release/README.md for details.
genrule(
    name = "schema_descriptions",
    srcs = ["generate.sh"],
    outs = [
        "schema-descriptions/v3.20.0-internal_database_schema.json",
        "schema-descriptions/v3.20.1-internal_database_schema.json",
        "schema-descriptions/v3.21.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.21.0-internal_database_schema.json",
        "schema-descriptions/v3.21.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.21.1-internal_database_schema.json",
        "schema-descriptions/v3.21.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.21.2-internal_database_schema.json",
        "schema-descriptions/v3.22.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.22.0-internal_database_schema.json",
        "schema-descriptions/v3.22.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.22.1-internal_database_schema.json",
        "schema-descriptions/v3.23.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.23.0-internal_database_schema.json",
        "schema-descriptions/v3.24.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.24.0-internal_database_schema.json",
        "schema-descriptions/v3.24.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.24.1-internal_database_schema.json",
        "schema-descriptions/v3.25.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.25.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.25.0-internal_database_schema.json",
        "schema-descriptions/v3.25.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.25.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.25.1-internal_database_schema.json",
        "schema-descriptions/v3.25.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.25.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.25.2-internal_database_schema.json",
        "schema-descriptions/v3.26.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.26.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.26.0-internal_database_schema.json",
        "schema-descriptions/v3.26.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.26.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.26.1-internal_database_schema.json",
        "schema-descriptions/v3.26.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.26.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.26.2-internal_database_schema.json",
        "schema-descriptions/v3.26.3-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.26.3-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.26.3-internal_database_schema.json",
        "schema-descriptions/v3.27.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.27.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.27.0-internal_database_schema.json",
        "schema-descriptions/v3.27.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.27.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.27.1-internal_database_schema.json",
        "schema-descriptions/v3.27.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.27.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.27.2-internal_database_schema.json",
        "schema-descriptions/v3.27.3-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.27.3-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.27.3-internal_database_schema.json",
        "schema-descriptions/v3.27.4-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.27.4-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.27.4-internal_database_schema.json",
        "schema-descriptions/v3.27.5-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.27.5-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.27.5-internal_database_schema.json",
        "schema-descriptions/v3.28.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.28.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.28.0-internal_database_schema.json",
        "schema-descriptions/v3.29.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.29.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.29.0-internal_database_schema.json",
        "schema-descriptions/v3.29.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.29.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.29.1-internal_database_schema.json",
        "schema-descriptions/v3.30.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.30.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.30.0-internal_database_schema.json",
        "schema-descriptions/v3.30.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.30.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.30.1-internal_database_schema.json",
        "schema-descriptions/v3.30.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.30.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.30.2-internal_database_schema.json",
        "schema-descriptions/v3.30.3-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.30.3-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.30.3-internal_database_schema.json",
        "schema-descriptions/v3.30.4-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.30.4-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.30.4-internal_database_schema.json",
        "schema-descriptions/v3.31.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.31.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.31.0-internal_database_schema.json",
        "schema-descriptions/v3.31.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.31.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.31.1-internal_database_schema.json",
        "schema-descriptions/v3.31.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.31.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.31.2-internal_database_schema.json",
        "schema-descriptions/v3.32.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.32.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.32.0-internal_database_schema.json",
        "schema-descriptions/v3.32.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.32.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.32.1-internal_database_schema.json",
        "schema-descriptions/v3.33.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.33.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.33.0-internal_database_schema.json",
        "schema-descriptions/v3.33.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.33.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.33.1-internal_database_schema.json",
        "schema-descriptions/v3.33.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.33.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.33.2-internal_database_schema.json",
        "schema-descriptions/v3.34.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.34.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.34.0-internal_database_schema.json",
        "schema-descriptions/v3.34.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.34.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.34.1-internal_database_schema.json",
        "schema-descriptions/v3.34.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.34.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.34.2-internal_database_schema.json",
        "schema-descriptions/v3.35.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.35.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.35.0-internal_database_schema.json",
        "schema-descriptions/v3.35.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.35.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.35.1-internal_database_schema.json",
        "schema-descriptions/v3.35.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.35.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.35.2-internal_database_schema.json",
        "schema-descriptions/v3.36.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.36.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.36.0-internal_database_schema.json",
        "schema-descriptions/v3.36.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.36.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.36.1-internal_database_schema.json",
        "schema-descriptions/v3.36.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.36.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.36.2-internal_database_schema.json",
        "schema-descriptions/v3.36.3-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.36.3-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.36.3-internal_database_schema.json",
        "schema-descriptions/v3.37.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.37.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.37.0-internal_database_schema.json",
        "schema-descriptions/v3.38.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.38.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.38.0-internal_database_schema.json",
        "schema-descriptions/v3.38.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.38.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.38.1-internal_database_schema.json",
        "schema-descriptions/v3.39.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.39.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.39.0-internal_database_schema.json",
        "schema-descriptions/v3.39.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.39.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.39.1-internal_database_schema.json",
        "schema-descriptions/v3.40.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.40.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.40.0-internal_database_schema.json",
        "schema-descriptions/v3.40.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.40.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.40.1-internal_database_schema.json",
        "schema-descriptions/v3.40.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.40.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.40.2-internal_database_schema.json",
        "schema-descriptions/v3.41.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.41.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.41.0-internal_database_schema.json",
        "schema-descriptions/v3.41.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.41.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.41.1-internal_database_schema.json",
        "schema-descriptions/v3.42.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.42.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.42.0-internal_database_schema.json",
        "schema-descriptions/v3.42.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.42.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.42.1-internal_database_schema.json",
        "schema-descriptions/v3.42.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.42.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.42.2-internal_database_schema.json",
        "schema-descriptions/v3.43.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.43.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.43.0-internal_database_schema.json",
        "schema-descriptions/v3.43.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.43.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.43.1-internal_database_schema.json",
        "schema-descriptions/v3.43.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v3.43.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v3.43.2-internal_database_schema.json",
        "schema-descriptions/v4.0.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.0.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.0.0-internal_database_schema.json",
        "schema-descriptions/v4.0.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.0.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.0.1-internal_database_schema.json",
        "schema-descriptions/v4.1.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.1.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.1.0-internal_database_schema.json",
        "schema-descriptions/v4.1.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.1.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.1.1-internal_database_schema.json",
        "schema-descriptions/v4.1.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.1.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.1.2-internal_database_schema.json",
        "schema-descriptions/v4.1.3-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.1.3-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.1.3-internal_database_schema.json",
        "schema-descriptions/v4.2.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.2.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.2.0-internal_database_schema.json",
        "schema-descriptions/v4.2.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.2.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.2.1-internal_database_schema.json",
        "schema-descriptions/v4.3.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.3.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.3.0-internal_database_schema.json",
        "schema-descriptions/v4.3.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.3.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.3.1-internal_database_schema.json",
        "schema-descriptions/v4.4.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.4.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.4.0-internal_database_schema.json",
        "schema-descriptions/v4.4.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.4.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.4.1-internal_database_schema.json",
        "schema-descriptions/v4.4.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.4.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.4.2-internal_database_schema.json",
        "schema-descriptions/v4.5.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.5.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.5.0-internal_database_schema.json",
        "schema-descriptions/v4.5.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v4.5.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v4.5.1-internal_database_schema.json",
        "schema-descriptions/v5.0.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.0.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.0.0-internal_database_schema.json",
        "schema-descriptions/v5.0.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.0.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.0.1-internal_database_schema.json",
        "schema-descriptions/v5.0.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.0.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.0.2-internal_database_schema.json",
        "schema-descriptions/v5.0.3-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.0.3-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.0.3-internal_database_schema.json",
        "schema-descriptions/v5.0.4-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.0.4-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.0.4-internal_database_schema.json",
        "schema-descriptions/v5.1.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.0-internal_database_schema.json",
        "schema-descriptions/v5.1.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.1-internal_database_schema.json",
        "schema-descriptions/v5.1.2-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.2-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.2-internal_database_schema.json",
        "schema-descriptions/v5.1.3-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.3-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.3-internal_database_schema.json",
        "schema-descriptions/v5.1.4-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.4-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.4-internal_database_schema.json",
        "schema-descriptions/v5.1.5-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.5-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.5-internal_database_schema.json",
        "schema-descriptions/v5.1.6-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.6-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.6-internal_database_schema.json",
        "schema-descriptions/v5.1.7-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.7-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.7-internal_database_schema.json",
        "schema-descriptions/v5.1.8-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.8-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.8-internal_database_schema.json",
        "schema-descriptions/v5.1.9-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.1.9-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.1.9-internal_database_schema.json",
        "schema-descriptions/v5.2.0-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.2.0-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.2.0-internal_database_schema.json",
        "schema-descriptions/v5.2.1-internal_database_schema.codeinsights.json",
        "schema-descriptions/v5.2.1-internal_database_schema.codeintel.json",
        "schema-descriptions/v5.2.1-internal_database_schema.json",
    ],
    cmd = "$(location generate.sh) $(@D)",
    tags = ["requires-network"],
    visibility = ["//visibility:public"],
)

sh_test(
    name = "schema_descriptions_test",
    size = "small",
    srcs = [
        "schema_descriptions_test.sh",
    ],
    args = [
        "$(location generate.sh)",
        "$(locations :schema_descriptions)",
    ],
    data = [
        "generate.sh",
        ":schema_descriptions",
    ],
    tags = ["requires-network"],
)

pkg_tar(
    name = "tar_schema_descriptions",
    srcs = [":schema_descriptions"],
    package_dir = "schema-descriptions",
    visibility = ["//cmd/migrator:__pkg__"],
    srcs = ["@schemas_archive//file"],
    outs = ["schema_descriptions.tar"],
    cmd = """\
mkdir -p schema-descriptions/
tar zxf $(location @schemas_archive//file:file) --no-same-owner -C schema-descriptions/

if tar --version | grep -q bsdtar; then
    tar -cf $@ --uid=0 --gid=0 --numeric-owner schema-descriptions/
else
    tar -cf $@ --owner=:0 --group=:0 --numeric-owner schema-descriptions/
fi
""",
)

pkg_tar(
@@ -1,85 +0,0 @@
#!/usr/bin/env bash

# This script generates all the schema-descriptions files.

cd "$(dirname "${BASH_SOURCE[0]}")/../.."
set -eu

OUTPUT="$1"

echo "Compiling schema descriptions ..."
mkdir -p "${OUTPUT}/schema-descriptions"

# See internal/database/migration/cliutil/drift-schemas/generate-all.sh
gcs_versions=(
    v3.20.0 v3.20.1
    v3.21.0 v3.21.1 v3.21.2
    v3.22.0 v3.22.1
    v3.23.0
    v3.24.0 v3.24.1
    v3.25.0 v3.25.1 v3.25.2
    v3.26.0 v3.26.1 v3.26.2 v3.26.3
    v3.27.0 v3.27.1 v3.27.2 v3.27.3 v3.27.4 v3.27.5
    v3.28.0
    v3.29.0 v3.29.1
    v3.30.0 v3.30.1 v3.30.2 v3.30.3 v3.30.4
    v3.31.0 v3.31.1 v3.31.2
    v3.32.0 v3.32.1
    v3.33.0 v3.33.1 v3.33.2
    v3.34.0 v3.34.1 v3.34.2
    v3.35.0 v3.35.1 v3.35.2
    v3.36.0 v3.36.1 v3.36.2 v3.36.3
    v3.37.0
    v3.38.0 v3.38.1
    v3.39.0 v3.39.1
    v3.40.0 v3.40.1 v3.40.2
    v3.41.0 v3.41.1
)
gcs_filenames=(
    internal_database_schema.json
    internal_database_schema.codeintel.json
    internal_database_schema.codeinsights.json
)

function download_gcs() {
    outfile="${OUTPUT}/schema-descriptions/${1}-${2}"
    # 3.20.0 is missing the codeintel and codeinsights schemas.
    if ! curl -fsSL "https://storage.googleapis.com/sourcegraph-assets/migrations/drift/${1}-${2}" 2>/dev/null >"${outfile}"; then
        rm "${outfile}"
    fi
}

for version in "${gcs_versions[@]}"; do
    for filename in "${gcs_filenames[@]}"; do
        download_gcs "${version}" "${filename}"
    done
done

function download_github() {
    local version
    version="$1"
    local github_url
    github_url="https://raw.githubusercontent.com/sourcegraph/sourcegraph/${version}/internal/database"

    curl -fsSL "$github_url/schema.json" >"${OUTPUT}/schema-descriptions/${version}-internal_database_schema.json"
    curl -fsSL "$github_url/schema.codeintel.json" >"${OUTPUT}/schema-descriptions/${version}-internal_database_schema.codeintel.json"
    curl -fsSL "$github_url/schema.codeinsights.json" >"${OUTPUT}/schema-descriptions/${version}-internal_database_schema.codeinsights.json"
}

git_versions=(
    v3.42.0 v3.42.1 v3.42.2
    v3.43.0 v3.43.1 v3.43.2
    v4.0.0 v4.0.1
    v4.1.0 v4.1.1 v4.1.2 v4.1.3
    v4.2.0 v4.2.1
    v4.3.0 v4.3.1
    v4.4.0 v4.4.1 v4.4.2
    v4.5.0 v4.5.1
    v5.0.0 v5.0.1 v5.0.2 v5.0.3 v5.0.4
    v5.1.0 v5.1.1 v5.1.2 v5.1.3 v5.1.4 v5.1.5 v5.1.6 v5.1.7 v5.1.8 v5.1.9
    v5.2.0
    v5.2.1)

for version in "${git_versions[@]}"; do
    download_github "${version}"
done
@@ -1,40 +0,0 @@
#!/usr/bin/env bash

# Path to the schema_descriptions tool
generate_bin="$1"

# Array of paths for each of the outputs from the :generate_config target.
# shellcheck disable=SC2124
got_files="${@:2}"

# Manually run the script again, so have a list of all the files
# we expect the :schema_descriptions target to output.
#
# We put them in the ./expected folder.
"$generate_bin" expected/

# Loop over all of them and check if we can find each of them in the
# outputs from :schema_descriptions target.
for file in expected/**/*; do
    # Trim the "expected" part of the path
    want="${file##expected}"
    found="false"

    # Loop over all files we got.
    # shellcheck disable=SC2068
    for got in ${got_files[@]}; do
        # Trim the path from the "monitoring/output" prefix
        # and test it against the expected file we're currently iterating with.
        if [[ "${got##cmd/migrator}" == "$want" ]]; then
            found="true"
            break
        fi
    done

    # If we didn't find it, return an error.
    if [[ $found == "false" ]]; then
        echo "Couldn't find expected output $want, perhaps it's missing from the 'srcs' attribute?"
        exit 1
    fi
done
@@ -373,3 +373,11 @@ write_generated_to_source_files(
    },
    target = ":generate_schemas",
)

exports_files(
    srcs = [
        "schema.codeinsights.json",
        "schema.codeintel.json",
        "schema.json",
    ],
)
tools/release/BUILD.bazel (new file, 19 lines)
@@ -0,0 +1,19 @@
sh_binary(
    name = "generate_schemas_archive",
    srcs = ["generate_schemas_archive.sh"],
    data = [
        "//internal/database:schema.codeinsights.json",
        "//internal/database:schema.codeintel.json",
        "//internal/database:schema.json",
    ],
)

sh_binary(
    name = "upload_current_schemas",
    srcs = ["upload_current_schemas.sh"],
    data = [
        "//internal/database:schema.codeinsights.json",
        "//internal/database:schema.codeintel.json",
        "//internal/database:schema.json",
    ],
)
tools/release/README.md (new file, 198 lines)
@@ -0,0 +1,198 @@
# Release tooling

## Usage

### Generating a database schemas tarball

Generating a database schemas tarball is achieved by downloading all known schemas plus the current schema. There are two options for the current version's database schemas: either we are cutting a new release and need to inject the current one, or we are regenerating the tarball to fix a problem.

To control which approach we take, we use the second parameter of the following command:

```
bazel run //tools/release:generate_schemas_archive -- vX.Y.Z [ACTION] $HOME/[PATH-TO-YOUR-REPO]
```

If run with `fetch-current-schemas`, the script will ensure that the schemas archive in the bucket correctly contains the given version's database schemas. It will also prompt the user for confirmation if a tarball for that version already exists in the bucket.

If run with `inject-current-schemas`, the script will ensure that the schemas archive in the bucket doesn't contain the schemas for the new version, and will instead create them by injecting the `internal/database/schema*.json` schemas into the tarball, renamed to match the expected convention.

Finally, in both cases, the tarball is uploaded to the bucket, and the third-party dependency located in `tools/release/schema_deps.bzl` is updated accordingly, allowing builds past that point to use those schemas.
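For illustration, here are two hypothetical invocations (the version and repository path are made up, the command and actions are the ones documented above):

```
# Regenerate the tarball for an already-released version, e.g. to fix a problem:
bazel run //tools/release:generate_schemas_archive -- v5.2.2 fetch-current-schemas "$HOME/sourcegraph"

# Cut a new release, injecting the schemas from the current source tree:
bazel run //tools/release:generate_schemas_archive -- v5.2.2 inject-current-schemas "$HOME/sourcegraph"
```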
### Uploading the current database schemas

Once a release is considered to be correct (upcoming in RFC 795), the release tooling runs another command to store the current database schemas in the bucket, under the `schemas/` folder, to capture how the database looks at that point.

This enables building migrator binaries that can use that particular release as a migration point.

```
bazel run //tools/release:upload_current_schemas -- vX.Y.Z
```

The script will ensure that no database schemas exist for that version before uploading anything. This way we prevent accidentally breaking previously generated database schemas.
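A minimal sketch of that guard, based on the `gsutil` check in `tools/release/upload_current_schemas.sh` further down in this change:

```
bucket='gs://schemas-migrations'
# Abort if any schema files already exist in the bucket for this version.
if gsutil -q ls "${bucket}/schemas/${version}-internal_database_schema*.json"; then
  echo "Database schemas for ${version} already exist: aborting."
  exit 1
fi
```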
## Database schemas

Database schemas are necessary for Multi-Version Upgrades, so we need to populate them when building and cutting new releases.

The following diagram provides an overview of how it works.
```
┌─────────────────────────────────────────┐
│ GCS Bucket                              │
│                                         │
│  ┌───────────────────────────────────┐  │
│  │ schemas/                          │  │
│  │  v3.1.4-(...).schema.json         │  │
│  │  ...                              │  │
│  │  v5.2.1234-(...).schema.json ◄────┼──┼─────── Uploaded on a successful
│  │                                   │  │        new release build
│  │                                   │  │
│  │                                   │  │
│  └───────────────────────────────────┘  │
│                                         │
│  ┌───────────────────────────────────┐  │
│  │ dist/                             │  │
│  │  schemas-v5.2.1093.tar.gz         │  │
│  │  schemas-v5.2.1234.tar.gz ◄───────┼──┼────── Uploaded at the beginning
│  │             ▲                     │  │       of a new release build.
│  │             │                     │  │
│  │             │                     │  │       Release build automatically
│  │             │                     │  │       updates the Bazel reference
│  │             │                     │  │       to that file.
│  └─────────────┼─────────────────────┘  │
│                │                        │       Gets deleted if the release
│                │                        │       build fails.
│                │                        │
└────────────────┼────────────────────────┘
                 │
                 │
                 │
      referenced by Bazel and used to
      populate the schemas when building
      the cmd/migrator Docker container.
```
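For orientation, this is roughly what the two folders look like when listed (the bucket name comes from the scripts below; the listed files are illustrative):

```
gsutil ls gs://schemas-migrations/schemas/ | head -n 2
# gs://schemas-migrations/schemas/v3.20.0-internal_database_schema.json
# gs://schemas-migrations/schemas/v3.20.1-internal_database_schema.json

gsutil ls gs://schemas-migrations/dist/ | tail -n 1
# gs://schemas-migrations/dist/schemas-v5.2.1.tar.gz
```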
There are two distinct scenarios:

1. Normal builds
2. Release builds

When doing a normal build, we simply use the schemas tarball that was previously set by the last release build. It contains all known schema descriptions that existed at that time.

When doing a release build, however, we need to refresh the schema descriptions, because patch releases might have been publicly released in the meantime, meaning those schemas now exist in the wild, on customer deployments or in the cloud.

Let's use a concrete example:
1. t=0: 5.1.0 has been released publicly
   - `main` branch is now considered to be 5.2.0
   - `5.1` branch is the target for PRs for backports and bug fixes.
1. t=10: 5.1.2222 has been released publicly
   - `5.1` branch is where this release was cut from.
1. t=20: 5.2.0 has been released publicly
   - `main` branch is now considered to be 5.3.0
   - `5.2` branch is the target for PRs for backports and bug fixes.
1. t=30: 5.1.3333 has been released publicly
   - `5.1` branch is where this release was cut from.

So in that scenario, when 5.1.3333 was released, we introduced a new version that the _migrator_ must be aware of, on both `main` and the `5.1` branch. Previously, this required us to make a PR porting the `5.1` branch references to the new 5.1.3333 schemas back to `main`. See [this PR for a real example](https://github.com/sourcegraph/sourcegraph/pull/56405/files#diff-38f26d6e9cb950b24ced060cd86effd4363b313d880d1afad1850887eabaf238R79).

Failing to do this would mean that the _migrator_ we ship with the next 5.2 release would not cover the migration path from 5.1.3333 when doing multi-version upgrades.

Ultimately, this means that whenever a release is cut, at any point in time, you need to be aware of all previously released versions, even those released on a previous minor release. Instead of having to remember to enact those changes, we can take a different approach.

The GCS bucket has two folders: `schemas/` and `dist/`. `schemas/` is the source of truth for all known schemas up until now, regardless of the current version. Whenever a new release is cut, the new schemas are added to that folder. Therefore, when doing the next release cut, we use that folder to populate all the schemas that the _migrator_ needs to be aware of, without having to make any manual change in the code.

Now, when building the _migrator_, we can't directly use the GCS bucket. Bazel wants a deterministic set of inputs, and "all content from the bucket" is not deterministic. To satisfy Bazel, we need a fixed, checksummed input to guarantee that the build is stable. So when we're creating a new release, we simply regenerate that tarball based on the schemas we find in the bucket under `schemas/` and upload it under `dist/`.
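Concretely, the fixed input is the `http_file` defined in `tools/release/schema_deps.bzl` (shown later in this change): a single tarball URL pinned by its sha256. Verifying the pin by hand could look like this (the local download path is hypothetical; the URL and checksum are the ones currently in `schema_deps.bzl`):

```
curl -fsSL -o schemas-v5.2.1.tar.gz \
  "https://storage.googleapis.com/schemas-migrations/dist/schemas-v5.2.1.tar.gz"
sha256sum schemas-v5.2.1.tar.gz
# expected: 3ec54f2d132ba5fc4f084f3bc76650f1c759ab32b5b73aba2ac9df91098ffeaf
```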
Step by step process (work-in-progress):

1. We want to create a new release, which is materialized by a pull request automatically created by `sg`.
1. `sg release create ...` runs `bazel run //tools/release:generate_schemas_archive -- v5.3.4444`:
   1. it fetches all schemas whose versions are below 5.3.4444
   1. it copies the current `schema.*.json` files to `v5.3.4444-internal_database_schema.*.json`, to match the convention of the other schemas.
   1. it creates a tarball named `schemas-v5.3.4444.tar.gz`
   1. it uploads it under the `dist/` folder in the bucket.
   1. it updates `tools/release/schema_deps.bzl` with the new tarball URL and its checksum.
1. CI builds the new release.
1. At the end of the build:
   - If green:
     - the schemas `v5.3.4444-internal_database_schema.*.json` are uploaded to the `schemas/` folder.
   - If red:
     - the schemas `v5.3.4444-internal_database_schema.*.json` are _NOT_ uploaded to the `schemas/` folder.
       - that's because if the release build failed, the release never existed, so there is no need to capture its existence: nobody will migrate from that version number.
     - the `schemas-v5.3.4444.tar.gz` tarball is removed from the `dist/` folder in the bucket. This is fine because no revision apart from the current PR references it.
1. The PR driving the release build is merged back into the base branch:
   - the updated buildfiles will use the uploaded `schemas-v5.3.4444.tar.gz` tarball from now on, eliminating the need to fetch anything from GCS apart from the tarball (and only until Bazel caches it).
## Q&A

> What happens if two release builds run at the same time?

If two builds are going on at the same time, they won't interfere with each other, because the only artifacts that can be removed without notice are the schemas tarballs, which are only referenced by each individual release build. As for the schemas, they only get created when the internal release is finally green and ready to be merged. If one of the two builds ends up referencing the schemas from the other, it means they didn't happen at the same time, but sequentially. That's because GCS guarantees that file uploads are transactional, i.e. it's not possible to list a file until it's fully uploaded.

> What happens if a release build fails? Can it mess with subsequent release builds?

It cannot, because the schemas are only added to `schemas/` when the release build succeeds. This is why, when regenerating the tarball, we fetch all the known schemas _and_ add the new one from the source tree at that point. Had we instead uploaded the new schemas at the beginning of the build, and then fetched everything (including the new one) to build the tarball, we would have had that problem.

> How do we ensure that the `schema.*.json` files in the source, at the revision we're cutting the release from, are correct?

This is covered by Bazel. These files are generated through `bazel run //dev:write_all_generated`, which comes with automatically generated `diff_test` rules that compare the files on disk with the files it would generate. Therefore, if someone pushes code without updating the current schemas in the code, Bazel will fail the build. And if we tried to cut a release on that precise commit, that same exact test would run again and fail.

Therefore, we can safely use the current schemas when cutting a release.

> What happens if the _migrator_ is built with newer schemas, e.g. a 5.1.3333 build that contains schemas for 5.2.4444?

It won't happen: in that case, when regenerating the tarball, the script that populates the schemas excludes all schemas above 5.1.X.
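A sketch of that filter, following the version comparison in `tools/release/generate_schemas_archive.sh` below (variable names taken from the script):

```
# Keep a schema file only if its major.minor is at or below the release being cut.
if [ "$major" -gt "$found_major" ]; then
  : # keep: strictly older major version
elif [[ "$major" -eq "$found_major" && "$minor" -ge "$found_minor" ]]; then
  : # keep: same major, minor at or below ours
else
  rm "$file" # newer than the release being cut: exclude it
fi
```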
> How does this work until we revamp the release process to match RFC 795?

The initial state has been created manually on the bucket, and there won't be any issues until we create a new release, which at the time of writing this doc is a periodic event, manually driven by the release captain. We can keep building the patch releases for 5.2.X with the old method; we just have to upload the new schemas to the bucket to ensure that the next release from `main`, i.e. 5.3.X, will be correct.

> How is that better than the previous flow?

- Before
  - Cutting a new release
    - Required porting the new schemas to `main` manually on each release.
    - Required Bazel to perform 280 individual HTTP requests sequentially to GitHub and GCS to fetch the schemas.
  - When building normally
    - Schemas are fully cached if the Bazel cache is warm. Otherwise, we go back to the 280 requests.
- After
  - Cutting a new release
    - Schemas are always up to date when cutting a new release. No need to port changes to other release branches or `main`.
    - Schemas are downloaded concurrently; it only takes a few seconds to grab all of them.
  - When building normally
    - Schemas are cached if the Bazel cache is warm. Otherwise, we download a single tarball of a few MBs.

> How do I see which schemas were used to build the _migrator_ container?

`tar tf $(bazel cquery //cmd/migrator:tar_schema_descriptions --output=files)` will show the contents of the container layer used to inject the schemas into the final _migrator_ container image.
tools/release/generate_schemas_archive.sh (new executable file, 192 lines)
@@ -0,0 +1,192 @@
#!/usr/bin/env bash

set -e

version="$1"
major=""
minor=""
action="$2"
repository_root="$3"

set -u

if [ "$#" -ne 3 ]; then
    echo "usage: [script] vX.Y.Z [inject-current-schemas|fetch-current-schemas] /absolute/path/to/repository/root"
    exit 1
fi

if [[ "$action" != "inject-current-schemas" && "$action" != "fetch-current-schemas" ]]; then
    echo "usage: [script] vX.Y.Z [inject-current-schemas|fetch-current-schemas] /absolute/path/to/repository/root"
    exit 1
fi

if ! [[ $version =~ ^v[0-9]+\.[0-9]+\.[0-9]+ ]]; then
    echo "version format is incorrect, usage: [script] vX.Y.Z"
    exit 1
fi

# To avoid breaking previous builds by accident, we want the tarballs we're creating to be idempotent, i.e.
# if we recreate one with the same inputs, we get the same exact tarball at the end.
#
# Usage: idempotent_tarball "foo" to produce foo.tar.gz containing files from ./*
#
# This is a bit tricky, as we have to manually eliminate anything that could change the result:
# - Explicitly sort files in the archive so the ordering stays stable.
# - Set the locale to C, so the sorting always has the same output.
# - Set ownership to root:root.
# - Set the modified time to the beginning of Unix time.
# - Use GNU tar regardless of whether we're on Linux or macOS. BSDTar doesn't come with the flags
#   we need to produce the same archives, even if the implementations are supposedly similar.
# - Gzip the tar file ourselves, using -n to not store the filename and, more importantly, the
#   timestamp in the metadata.
function idempotent_tarball {
    local base="$1"
    local tarbin="tar"
    if tar --version | grep -q bsdtar; then
        echo "⚠️ BSDTar detected, using gtar to produce idempotent tarball."
        tarbin="gtar"
    fi

    # Produces ${base}.tar
    LC_ALL=C "$tarbin" cf "${base}.tar" --owner=root:0 --group=root:0 --numeric-owner --mtime='UTC 1970-01-01' ./*

    # Produces ${base}.tar.gz
    gzip -n "${base}.tar"
}

bucket='gs://schemas-migrations'

if [[ $version =~ ^v([0-9]+)\.([0-9]+)\.[0-9]+$ ]]; then
    major=${BASH_REMATCH[1]}
    minor=${BASH_REMATCH[2]}
else
    echo "Usage: [...] vX.Y.Z where X is the major version and Y the minor version"
    exit 1
fi

echo "Generating an archive of all released database schemas for v$major.$minor"
tmp_dir=$(mktemp -d)
# shellcheck disable=SC2064
trap "rm -Rf $tmp_dir" EXIT

# Downloading everything at once is much, much faster and simpler than fetching individual files,
# even when done concurrently.
echo "--- Downloading all schemas from ${bucket}/schemas"
gsutil -m -q cp "${bucket}/schemas/*" "$tmp_dir"

pushd "$tmp_dir"
echo "--- Filtering out migrations after ${major}.${minor}"
for file in *; do
    if [[ $file =~ ^v([0-9]+)\.([0-9]+) ]]; then
        found_major=${BASH_REMATCH[1]}
        found_minor=${BASH_REMATCH[2]}

        # If the major version we're targeting is strictly greater than the one we're looking at,
        # we don't bother looking at the minor version and we keep the file.
        if [ "$major" -gt "$found_major" ]; then
            continue
        else
            # If the major version is the same, we need to inspect the minor versions to know
            # whether we keep it or not.
            if [[ "$major" -eq "$found_major" && "$minor" -ge "$found_minor" ]]; then
                continue
            fi
        fi

        # What's left has to be excluded.
        echo "Rejecting $file"
        rm "$file"
    fi
done
popd

if [[ $action == "fetch-current-schemas" ]]; then
    echo "--- Skipping current schema"
    must_exist_schemas=(
        "${tmp_dir}/${version}-internal_database_schema.json"
        "${tmp_dir}/${version}-internal_database_schema.codeintel.json"
        "${tmp_dir}/${version}-internal_database_schema.codeinsights.json"
    )

    for f in "${must_exist_schemas[@]}"; do
        if [ -f "$f" ]; then
            echo "✅ Found $f database schema for ${version}"
        else
            echo "❌ Missing $f database schema for ${version}"
            echo "⚠️ Either this command was accidentally run with fetch-current-schemas while intending to create a release,"
            echo "⚠️ or the currently archived database schemas are missing the current version, which indicates"
            echo "⚠️ a botched release."
            exit 1
        fi
    done
else
    echo "--- Injecting current schemas"
    must_not_exist_schemas=(
        "${tmp_dir}/${version}-internal_database_schema.json"
        "${tmp_dir}/${version}-internal_database_schema.codeintel.json"
        "${tmp_dir}/${version}-internal_database_schema.codeinsights.json"
    )

    for f in "${must_not_exist_schemas[@]}"; do
        if [ -f "$f" ]; then
            echo "❌ Prior database schemas exist for ${version}"
            echo "⚠️ Either this command was accidentally run with inject-current-schemas while intending to"
            echo "⚠️ regenerate an existing release's tarball, or a release was botched."
            exit 1
        else
            echo "✅ No prior database schemas exist for ${version}"
        fi
    done

    cp internal/database/schema.json "${tmp_dir}/${version}-internal_database_schema.json"
    cp internal/database/schema.codeintel.json "${tmp_dir}/${version}-internal_database_schema.codeintel.json"
    cp internal/database/schema.codeinsights.json "${tmp_dir}/${version}-internal_database_schema.codeinsights.json"
fi

output_base_path="${PWD}/schemas-${version}"
output_path="${output_base_path}.tar.gz"
output_basename="$(basename "$output_path")"
# Note: setting a new EXIT trap replaces the earlier one, so clean up the temporary directory here too.
trap 'rm -Rf $tmp_dir; rm $output_path' EXIT

echo "--- Creating tarball '$output_path'"
pushd "$tmp_dir"
idempotent_tarball "$output_base_path"
popd

checksum=$(sha256sum "$output_path" | cut -d ' ' -f1)
echo "Checksum: $checksum"
echo "--- Uploading tarball to ${bucket}/dist"

# Tarballs are reproducible, and the only reason the user would want to overwrite an existing one
# is to fix a problem. We don't want anyone to run this by accident, so we explicitly ask for confirmation.
if gsutil -q ls "${bucket}/dist/${output_basename}"; then
    echo "--- ⚠️ A database schemas tarball already exists for this version"
    echo "Type OVERWRITE followed by ENTER to confirm you want to overwrite it. Anything else will abort."
    read -p "Are you sure? " -r
    echo
    if [[ "$REPLY" != "OVERWRITE" ]]; then
        echo "Aborting, tarball left intact on the bucket."
        exit 1
    fi
fi

gsutil -q cp "$output_path" "${bucket}/dist/"

echo "--- Updating buildfiles"
# Starlark is practically the same as Python, so we use that matcher.
comby -matcher .py \
    -in-place \
    'urls = [":[1]"],' \
    "urls = [\"https://storage.googleapis.com/schemas-migrations/dist/$output_basename\"]," \
    "${repository_root}/tools/release/schema_deps.bzl"

comby -matcher .py \
    -in-place \
    'sha256 = ":[1]",' \
    "sha256 = \"$checksum\"," \
    "${repository_root}/tools/release/schema_deps.bzl"

echo "--- Summary"
tar tvf "$output_path"
echo "Uploaded ${bucket}/dist/${output_basename} sha256:${checksum}"
tools/release/schema_deps.bzl (new file, 13 lines)
@@ -0,0 +1,13 @@
"""
|
||||
This module defines the third party dependency containing all database schemas that the
|
||||
migrator use to handle migrations. See the README.md in this folder for reference.
|
||||
"""
|
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
|
||||
|
||||
def schema_deps():
|
||||
http_file(
|
||||
name = "schemas_archive",
|
||||
urls = ["https://storage.googleapis.com/schemas-migrations/dist/schemas-v5.2.1.tar.gz"],
|
||||
sha256 = "3ec54f2d132ba5fc4f084f3bc76650f1c759ab32b5b73aba2ac9df91098ffeaf",
|
||||
)
|
||||
tools/release/upload_current_schemas.sh (new executable file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env bash

set -eu

# Check the argument count before reading $1, since set -u would abort on the unbound variable.
if [ "$#" -ne 1 ]; then
    echo "usage: [script] vX.Y.Z"
    exit 1
fi

version="$1"

if ! [[ $version =~ ^v[0-9]+\.[0-9]+\.[0-9]+ ]]; then
    echo "version format is incorrect, usage: [script] vX.Y.Z"
    exit 1
fi

bucket='gs://schemas-migrations'

tmp_dir=$(mktemp -d)
trap 'rm -Rf $tmp_dir' EXIT

echo "--- Ensuring that database schemas do not exist for this version"
if gsutil -q ls "${bucket}/schemas/${version}-internal_database_schema*.json"; then
    echo "⚠️ Found the above schemas in the bucket."
    echo "--- ❌ Database schemas for version ${version} already exist: aborting."
    exit 1
fi

echo "--- Copying internal/database/schema*.json to ${version}-internal_database_schema*.json"
cp internal/database/schema.json "${tmp_dir}/${version}-internal_database_schema.json"
cp internal/database/schema.codeintel.json "${tmp_dir}/${version}-internal_database_schema.codeintel.json"
cp internal/database/schema.codeinsights.json "${tmp_dir}/${version}-internal_database_schema.codeinsights.json"

echo "--- Uploading to GCS Bucket '${bucket}/schemas'"
pushd "$tmp_dir"
gsutil cp ./*.json "${bucket}/schemas/"
popd

echo "--- ✅ Schemas for ${version} are now available for other releases"