sg migration: Remove schemadoc (#35905)

This commit is contained in:
Eric Fritz 2022-05-24 11:19:19 -05:00 committed by GitHub
parent e2154c6e91
commit b15787d4e2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 84 additions and 134 deletions

View File

@ -1,3 +0,0 @@
# See https://github.com/sourcegraph/codenotify for documentation.
**/* @efritz

View File

@ -1,110 +0,0 @@
package main
import (
"context"
"database/sql"
"fmt"
"log"
"os"
"os/exec"
_ "github.com/lib/pq"
"golang.org/x/sync/errgroup"
connections "github.com/sourcegraph/sourcegraph/internal/database/connections/live"
descriptions "github.com/sourcegraph/sourcegraph/internal/database/migration/schemas"
migrationstore "github.com/sourcegraph/sourcegraph/internal/database/migration/store"
"github.com/sourcegraph/sourcegraph/internal/observation"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
// runFunc executes an external command (optionally with stderr suppressed via
// quiet) and returns its captured stdout along with any execution error.
type runFunc func(quiet bool, cmd ...string) (string, error)

// databaseNamePrefix prefixes the names of the temporary databases this
// script creates (and drops) so they are easy to identify and dispose of.
const databaseNamePrefix = "schemadoc-gen-temp-"

// logger forwards child-process stderr to this script's stderr.
var logger = log.New(os.Stderr, "", log.LstdFlags)

// databaseFactory opens a connection to the given DSN and runs that schema's
// migrations against it (see connections.MigrateNew*DB).
type databaseFactory func(dsn string, appName string, observationContext *observation.Context) (*sql.DB, error)

// schemas enumerates the databases to describe: each entry pairs the base
// filename the description is written to (".md"/".json" suffixes are added)
// with the factory that migrates a fresh database for that schema.
var schemas = map[string]struct {
	destinationFilename string
	factory             databaseFactory
}{
	"frontend":  {"schema", connections.MigrateNewFrontendDB},
	"codeintel": {"schema.codeintel", connections.MigrateNewCodeIntelDB},
	"insights":  {"schema.codeinsights", connections.MigrateNewCodeInsightsDB},
}
// This script generates markdown formatted output containing descriptions of
// the current database schema, obtained from postgres. The correct PGHOST,
// PGPORT, PGUSER etc. env variables must be set to run this script.
// main runs the generator and terminates the process with a non-zero exit
// code if any schema description fails to generate.
func main() {
	err := mainErr()
	if err == nil {
		return
	}
	log.Fatal(err)
}
// mainErr generates a description for every registered schema concurrently
// and returns the first error encountered, if any.
func mainErr() error {
	prefix := "dbname=" + databaseNamePrefix
	group, _ := errgroup.WithContext(context.Background())

	for name, schema := range schemas {
		// Capture per-iteration copies so each goroutine sees its own values.
		name, schema := name, schema
		group.Go(func() error {
			return generateAndWrite(name, schema.factory, prefix+name, nil, schema.destinationFilename)
		})
	}

	return group.Wait()
}
// generateAndWrite provisions a throwaway Postgres database, runs the given
// schema's migrations against it via factory, then describes the resulting
// "public" schema and writes it to destinationFile+".md" (psql format) and
// destinationFile+".json".
//
// commandPrefix is prepended to every shell command that is run (e.g. to run
// inside a container). The temporary database is dropped both before the run
// (in case a previous run left one behind) and after, best-effort.
func generateAndWrite(name string, factory databaseFactory, dataSource string, commandPrefix []string, destinationFile string) error {
	run := runWithPrefix(commandPrefix)
	dbName := databaseNamePrefix + name

	// Best-effort cleanup of any stale database, and of our own on exit.
	_, _ = run(true, "dropdb", dbName)
	defer func() { _, _ = run(true, "dropdb", dbName) }()

	out, err := run(false, "createdb", dbName)
	if err != nil {
		return errors.Wrap(err, fmt.Sprintf("run: %s", out))
	}

	db, err := factory(dataSource, "", &observation.TestContext)
	if err != nil {
		return err
	}
	defer db.Close()

	store := migrationstore.NewWithDB(db, "schema_migrations", migrationstore.NewOperations(&observation.TestContext))
	described, err := store.Describe(context.Background())
	if err != nil {
		return err
	}
	public := described["public"]

	if err := os.WriteFile(destinationFile+".md", []byte(descriptions.NewPSQLFormatter().Format(public)), os.ModePerm); err != nil {
		return err
	}
	return os.WriteFile(destinationFile+".json", []byte(descriptions.NewJSONFormatter().Format(public)), os.ModePerm)
}
// runWithPrefix returns a runFunc that executes commands with the given
// argument prefix prepended (e.g. to wrap every invocation in a container
// runtime). When quiet is false, the child's stderr is forwarded to this
// script's logger.
func runWithPrefix(prefix []string) runFunc {
	return func(quiet bool, args ...string) (string, error) {
		full := append(prefix, args...)
		command := exec.Command(full[0], full[1:]...)
		if !quiet {
			command.Stderr = logger.Writer()
		}
		output, err := command.Output()
		return string(output), err
	}
}

View File

@ -33,7 +33,7 @@ const (
squasherContainerPostgresName = "postgres"
)
func SquashAll(database db.Database, inContainer bool, filepath string) error {
func SquashAll(database db.Database, inContainer, skipTeardown bool, filepath string) error {
definitions, err := readDefinitions(database)
if err != nil {
return err
@ -43,7 +43,7 @@ func SquashAll(database db.Database, inContainer bool, filepath string) error {
leafIDs = append(leafIDs, leaf.ID)
}
squashedUpMigration, _, err := generateSquashedMigrations(database, leafIDs, inContainer)
squashedUpMigration, _, err := generateSquashedMigrations(database, leafIDs, inContainer, skipTeardown)
if err != nil {
return err
}
@ -51,7 +51,7 @@ func SquashAll(database db.Database, inContainer bool, filepath string) error {
return os.WriteFile(filepath, []byte(squashedUpMigration), os.ModePerm)
}
func Squash(database db.Database, commit string, inContainer bool) error {
func Squash(database db.Database, commit string, inContainer, skipTeardown bool) error {
definitions, err := readDefinitions(database)
if err != nil {
return err
@ -66,7 +66,7 @@ func Squash(database db.Database, commit string, inContainer bool) error {
}
// Run migrations up to the new selected root and dump the database into a single migration file pair
squashedUpMigration, squashedDownMigration, err := generateSquashedMigrations(database, []int{newRoot.ID}, inContainer)
squashedUpMigration, squashedDownMigration, err := generateSquashedMigrations(database, []int{newRoot.ID}, inContainer, skipTeardown)
if err != nil {
return err
}
@ -181,13 +181,15 @@ func selectNewRootMigration(database db.Database, ds *definition.Definitions, co
// generateSquashedMigrations generates the content of a migration file pair that contains the contents
// of a database up to a given migration index.
func generateSquashedMigrations(database db.Database, targetVersions []int, inContainer bool) (up, down string, err error) {
func generateSquashedMigrations(database db.Database, targetVersions []int, inContainer, skipTeardown bool) (up, down string, err error) {
postgresDSN, teardown, err := setupDatabaseForSquash(database, inContainer)
if err != nil {
return "", "", err
}
defer func() {
err = teardown(err)
if !skipTeardown {
err = teardown(err)
}
}()
if err := runTargetedUpMigrations(database, targetVersions, postgresDSN); err != nil {

View File

@ -46,6 +46,14 @@ var (
Destination: &squashInContainer,
}
skipTeardown bool
skipTeardownFlag = &cli.BoolFlag{
Name: "skip-teardown",
Usage: "Skip tearing down the database created to run all registered migrations",
Value: false,
Destination: &skipTeardown,
}
outputFilepath string
outputFilepathFlag = &cli.StringFlag{
Name: "f",
@ -114,7 +122,7 @@ var (
ArgsUsage: "<current-release>",
Usage: "Collapse migration files from historic releases together",
Description: cliutil.ConstructLongHelp(),
Flags: []cli.Flag{migrateTargetDatabaseFlag, squashInContainerFlag},
Flags: []cli.Flag{migrateTargetDatabaseFlag, squashInContainerFlag, skipTeardownFlag},
Action: execAdapter(squashExec),
}
@ -123,7 +131,7 @@ var (
ArgsUsage: "",
Usage: "Collapse schema definitions into a single SQL file",
Description: cliutil.ConstructLongHelp(),
Flags: []cli.Flag{migrateTargetDatabaseFlag, squashInContainerFlag, outputFilepathFlag},
Flags: []cli.Flag{migrateTargetDatabaseFlag, squashInContainerFlag, skipTeardownFlag, outputFilepathFlag},
Action: execAdapter(squashAllExec),
}
@ -265,7 +273,7 @@ func squashExec(ctx context.Context, args []string) (err error) {
}
std.Out.Writef("Squashing migration files defined up through %s", commit)
return migration.Squash(database, commit, squashInContainer)
return migration.Squash(database, commit, squashInContainer, skipTeardown)
}
func squashAllExec(ctx context.Context, args []string) (err error) {
@ -283,7 +291,7 @@ func squashAllExec(ctx context.Context, args []string) (err error) {
return flag.ErrHelp
}
return migration.SquashAll(database, squashInContainer, outputFilepath)
return migration.SquashAll(database, squashInContainer, skipTeardown, outputFilepath)
}
func leavesExec(ctx context.Context, args []string) (err error) {

View File

@ -1,7 +1,4 @@
package database
// $PGHOST, $PGUSER, $PGPORT etc. must be set to run this generate script.
//go:generate env GO111MODULE=on go run ../../dev/schemadoc/main.go
//go:generate env GO111MODULE=on go run ../../dev/sg migration squash-all -db frontend -f ../../migrations/frontend/squashed.sql
//go:generate env GO111MODULE=on go run ../../dev/sg migration squash-all -db codeintel -f ../../migrations/codeintel/squashed.sql
//go:generate env GO111MODULE=on go run ../../dev/sg migration squash-all -db codeinsights -f ../../migrations/codeinsights/squashed.sql
// $PGHOST, $PGUSER, $PGPORT etc. must be set to run this generate script
//go:generate ./gen.sh

34
internal/database/gen.sh Executable file
View File

@ -0,0 +1,34 @@
#!/usr/bin/env bash
# Regenerates the committed squashed migration SQL files and the database
# schema descriptions (schema*.md / schema*.json) using a locally built sg.
# $PGHOST, $PGUSER, $PGPASSWORD, $PGPORT etc. must be set before running.

# Always operate from the repository root, regardless of invocation directory.
cd "$(dirname "${BASH_SOURCE[0]}")/../.."
set -ex
export GO111MODULE=on

# Build a throwaway sg binary at the repo root.
pushd ./dev/sg
go build -o ../../tmp-sg
popd

# Remove the temporary binary on exit, even on failure.
function finish {
rm -f ./tmp-sg
}
trap finish EXIT

# Squash migrations and create new SQL file; leave database as-is so we can re-describe in different formats
./tmp-sg migration squash-all -skip-teardown -db frontend -f migrations/frontend/squashed.sql
./tmp-sg migration squash-all -skip-teardown -db codeintel -f migrations/codeintel/squashed.sql
./tmp-sg migration squash-all -skip-teardown -db codeinsights -f migrations/codeinsights/squashed.sql

# Point the describe commands at the databases the squasher left running;
# presumably the squasher names them "sg-squasher-<db>" — verify against the
# squash implementation if these DSNs stop resolving.
export PGDATASOURCE="postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:${PGPORT}/sg-squasher-frontend"
export CODEINTEL_PGDATASOURCE="postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:${PGPORT}/sg-squasher-codeintel"
export CODEINSIGHTS_PGDATASOURCE="postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:${PGPORT}/sg-squasher-codeinsights"

# Output psql-formatted schema descriptions
./tmp-sg migration describe -db frontend --format=psql -force -out internal/database/schema.md
./tmp-sg migration describe -db codeintel --format=psql -force -out internal/database/schema.codeintel.md
./tmp-sg migration describe -db codeinsights --format=psql -force -out internal/database/schema.codeinsights.md

# Output json-formatted schema descriptions
./tmp-sg migration describe -db frontend --format=json -force -out internal/database/schema.json
./tmp-sg migration describe -db codeintel --format=json -force -out internal/database/schema.codeintel.json
./tmp-sg migration describe -db codeinsights --format=json -force -out internal/database/schema.codeinsights.json

View File

@ -37,31 +37,40 @@ func Describe(commandName string, factory RunnerFactory, outFactory OutputFactor
Required: false,
}
action := makeAction(outFactory, func(ctx context.Context, cmd *cli.Context, out *output.Output) error {
output, shouldDecorate, err := getOutput(out, outFlag.Get(cmd), forceFlag.Get(cmd), noColorFlag.Get(cmd))
action := makeAction(outFactory, func(ctx context.Context, cmd *cli.Context, out *output.Output) (err error) {
w, shouldDecorate, err := getOutput(out, outFlag.Get(cmd), forceFlag.Get(cmd), noColorFlag.Get(cmd))
if err != nil {
return err
}
defer output.Close()
defer w.Close()
formatter := getFormatter(formatFlag.Get(cmd), shouldDecorate)
if formatter == nil {
return flagHelp(out, "unrecognized format %q (must be json or psql)", formatFlag.Get(cmd))
}
_, store, err := setupStore(ctx, factory, schemaNameFlag.Get(cmd))
schemaName := schemaNameFlag.Get(cmd)
_, store, err := setupStore(ctx, factory, schemaName)
if err != nil {
return err
}
pending := out.Pending(output.Linef("", output.StylePending, "Describing database %s...", schemaName))
defer func() {
if err == nil {
pending.Complete(output.Linef(output.EmojiSuccess, output.StyleSuccess, "Description of %s written to target", schemaName))
} else {
pending.Destroy()
}
}()
schemas, err := store.Describe(ctx)
if err != nil {
return err
}
schema := schemas["public"]
if _, err := io.Copy(output, strings.NewReader(formatter.Format(schema))); err != nil {
if _, err := io.Copy(w, strings.NewReader(formatter.Format(schema))); err != nil {
return err
}

View File

@ -235,6 +235,17 @@ CREATE FUNCTION func_configuration_policies_update() RETURNS trigger
END;
$$;
CREATE FUNCTION func_insert_gitserver_repo() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO gitserver_repos
(repo_id, shard_id)
VALUES (NEW.id, '');
RETURN NULL;
END;
$$;
CREATE FUNCTION func_lsif_uploads_delete() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -3625,6 +3636,8 @@ CREATE TRIGGER trigger_configuration_policies_insert AFTER INSERT ON lsif_config
CREATE TRIGGER trigger_configuration_policies_update BEFORE UPDATE OF name, pattern, retention_enabled, retention_duration_hours, type, retain_intermediate_commits ON lsif_configuration_policies FOR EACH ROW EXECUTE FUNCTION func_configuration_policies_update();
CREATE TRIGGER trigger_gitserver_repo_insert AFTER INSERT ON repo FOR EACH ROW EXECUTE FUNCTION func_insert_gitserver_repo();
CREATE TRIGGER trigger_lsif_uploads_delete AFTER DELETE ON lsif_uploads REFERENCING OLD TABLE AS old FOR EACH STATEMENT EXECUTE FUNCTION func_lsif_uploads_delete();
CREATE TRIGGER trigger_lsif_uploads_insert AFTER INSERT ON lsif_uploads FOR EACH ROW EXECUTE FUNCTION func_lsif_uploads_insert();