mirror of
https://github.com/sourcegraph/sourcegraph.git
synced 2026-02-06 11:01:44 +00:00
parent
ad082497f2
commit
ae34b1df84
@ -25,5 +25,4 @@ client/testing/package.json=366497236
|
||||
client/web/package.json=-1637464847
|
||||
client/wildcard/package.json=362918481
|
||||
client/vscode/package.json=1345248728
|
||||
dev/release/package.json=1426426960
|
||||
pnpm-workspace.yaml=-69372893
|
||||
|
||||
@ -24,7 +24,6 @@ client/testing/node_modules
|
||||
client/web/node_modules
|
||||
client/web-sveltekit/node_modules
|
||||
client/wildcard/node_modules
|
||||
dev/release/node_modules
|
||||
|
||||
cmd/symbols/squirrel/test_repos/starlark
|
||||
|
||||
|
||||
@ -59,5 +59,4 @@ dev/backcompat/flakes.json
|
||||
# Errors when used with @ianvs/prettier-plugin-sort-imports due to TS language API hitting global `eval`
|
||||
client/browser/src/types/webextension-polyfill/index.d.ts
|
||||
|
||||
dev/release/release-config.jsonc
|
||||
graphql-operations.ts
|
||||
|
||||
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
@ -45,7 +45,6 @@
|
||||
"typescript.format.semicolons": "remove",
|
||||
"typescript.tsc.autoDetect": "off",
|
||||
"typescript.tsdk": "node_modules/typescript/lib",
|
||||
"eslint.packageManager": "pnpm",
|
||||
"eslint.lintTask.enable": false,
|
||||
"eslint.validate": ["javascript", "javascriptreact", "typescript", "typescriptreact"],
|
||||
"editor.codeActionsOnSave": {},
|
||||
@ -53,7 +52,7 @@
|
||||
"eslint.options": {
|
||||
"cache": true
|
||||
},
|
||||
"eslint.workingDirectories": ["./dev/release", "./client/*"],
|
||||
"eslint.workingDirectories": ["./client/*"],
|
||||
"go.lintTool": "golangci-lint",
|
||||
"shellformat.flag": "-i 2 -ci",
|
||||
"vscode-graphql.useSchemaFileDefinitions": true,
|
||||
|
||||
7
.vscode/tasks.json
vendored
7
.vscode/tasks.json
vendored
@ -85,13 +85,6 @@
|
||||
"path": "client/web/",
|
||||
"problemMatcher": ["$eslint-stylish"]
|
||||
},
|
||||
{
|
||||
"label": "eslint:release",
|
||||
"type": "npm",
|
||||
"script": "eslint",
|
||||
"path": "dev/release/",
|
||||
"problemMatcher": ["$eslint-stylish"]
|
||||
},
|
||||
{
|
||||
"label": "eslint:extension-api",
|
||||
"type": "npm",
|
||||
|
||||
@ -101,12 +101,6 @@ func ParseDiff(files []string) (diff Diff, changedFiles ChangedFiles) {
|
||||
diff |= Pnpm
|
||||
}
|
||||
|
||||
// dev/release contains a nodejs script that doesn't have tests but needs to be
|
||||
// linted with Client linters. We skip the release config file to reduce friction editing during releases.
|
||||
if strings.HasPrefix(p, "dev/release/") && !strings.Contains(p, "release-config") {
|
||||
diff |= Client
|
||||
}
|
||||
|
||||
// Affects GraphQL
|
||||
if strings.HasSuffix(p, ".graphql") {
|
||||
diff |= GraphQL
|
||||
|
||||
@ -17,7 +17,7 @@ DIRS=(
|
||||
client/build-config
|
||||
client/client-api
|
||||
client/codeintellify
|
||||
client/common
|
||||
client/common
|
||||
client/extension-api
|
||||
client/extension-api-types
|
||||
client/http-client
|
||||
@ -30,7 +30,6 @@ DIRS=(
|
||||
client/testing
|
||||
client/vscode
|
||||
client/wildcard
|
||||
dev/release
|
||||
)
|
||||
# Keep the list of client workspaces in alphabetical order!
|
||||
|
||||
|
||||
@ -1,10 +0,0 @@
|
||||
const baseConfig = require('../../.eslintrc')
|
||||
module.exports = {
|
||||
extends: '../../.eslintrc.js',
|
||||
parserOptions: {
|
||||
...baseConfig.parserOptions,
|
||||
project: __dirname + '/tsconfig.json',
|
||||
},
|
||||
rules: { 'no-console': 'off' },
|
||||
overrides: baseConfig.overrides,
|
||||
}
|
||||
1
dev/release/.gitignore
vendored
1
dev/release/.gitignore
vendored
@ -1 +0,0 @@
|
||||
.secrets/
|
||||
Binary file not shown.
@ -1,14 +0,0 @@
|
||||
# Sourcegraph release tool
|
||||
|
||||
This directory contains scripts and code to automate our releases. Refer to
|
||||
[the handbook](https://handbook.sourcegraph.com/engineering/releases) for details
|
||||
on our release process and how this tool is used.
|
||||
|
||||
To see all available steps:
|
||||
|
||||
```sh
|
||||
pnpm run release help # add 'all' to see test commands as well
|
||||
```
|
||||
|
||||
Before using this tool, please verify that the [release configuration](./release-config.jsonc)
|
||||
is set up correctly.
|
||||
@ -1,14 +0,0 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "@sourcegraph/dev-release",
|
||||
"version": "0.0.1",
|
||||
"description": "Scripts for managing release captain duties",
|
||||
"scripts": {
|
||||
"release": "ts-node --transpile-only ./src/main.ts",
|
||||
"lint:js": "eslint --cache 'src/**/*.[jt]s?(x)'"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/luxon": "^3.2.0",
|
||||
"luxon": "^3.2.1"
|
||||
}
|
||||
}
|
||||
@ -1,154 +0,0 @@
|
||||
import commandExists from 'command-exists'
|
||||
import execa from 'execa'
|
||||
import fetch from 'node-fetch'
|
||||
import YAML from 'yaml'
|
||||
|
||||
import type { CreatedChangeset } from './github'
|
||||
import { readLine, cacheFolder } from './util'
|
||||
|
||||
// https://handbook.sourcegraph.com/departments/engineering/dev/process/deployments/instances/#sourcegraphsourcegraphcom-s2
|
||||
const DEFAULT_SRC_ENDPOINT = 'https://sourcegraph.sourcegraph.com'
|
||||
|
||||
interface SourcegraphCLIConfig {
|
||||
SRC_ENDPOINT: string
|
||||
SRC_ACCESS_TOKEN: string
|
||||
[index: string]: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves src-cli configuration and ensures src-cli exists.
|
||||
*/
|
||||
export async function sourcegraphCLIConfig(): Promise<SourcegraphCLIConfig> {
|
||||
await commandExists('src') // CLI must be present for batch change interactions
|
||||
return {
|
||||
SRC_ENDPOINT: DEFAULT_SRC_ENDPOINT,
|
||||
// I updated the file name here to avoid a situation where folks with existing s2 token
|
||||
// cached will get a 403 because it's not valid for S2.
|
||||
SRC_ACCESS_TOKEN: await readLine('s2 src-cli token: ', `${cacheFolder}/src-cli-s2.txt`),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters defining a batch change to interact with.
|
||||
*
|
||||
* Generate `cliConfig` using `sourcegraphCLIConfig()`.
|
||||
*/
|
||||
export interface BatchChangeOptions {
|
||||
name: string
|
||||
namespace: string
|
||||
cliConfig: SourcegraphCLIConfig
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate batch change configuration for a given release.
|
||||
*/
|
||||
export function releaseTrackingBatchChange(version: string, cliConfig: SourcegraphCLIConfig): BatchChangeOptions {
|
||||
return {
|
||||
name: `release-sourcegraph-${version}`,
|
||||
namespace: 'sourcegraph',
|
||||
cliConfig,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a URL for a batch change that would be created under the given camapign options.
|
||||
*
|
||||
* Does not ensure the batch change exists.
|
||||
*/
|
||||
export function batchChangeURL(options: BatchChangeOptions): string {
|
||||
return `${options.cliConfig.SRC_ENDPOINT}/organizations/${options.namespace}/batch-changes/${options.name}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new batch change from a set of changes.
|
||||
*/
|
||||
export async function createBatchChange(
|
||||
changes: CreatedChangeset[],
|
||||
options: BatchChangeOptions,
|
||||
description: string
|
||||
): Promise<void> {
|
||||
// create a batch change spec
|
||||
const importChangesets = changes.map(change => ({
|
||||
repository: `github.com/${change.repository}`,
|
||||
externalIDs: [change.pullRequestNumber],
|
||||
}))
|
||||
// apply batch change
|
||||
// eslint-disable-next-line @typescript-eslint/return-await
|
||||
return await applyBatchChange(
|
||||
{
|
||||
name: options.name,
|
||||
description,
|
||||
importChangesets,
|
||||
},
|
||||
options
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Append changes to an existing batch change.
|
||||
*/
|
||||
export async function addToBatchChange(
|
||||
changes: { repository: string; pullRequestNumber: number }[],
|
||||
options: BatchChangeOptions
|
||||
): Promise<void> {
|
||||
const response = await fetch(`${options.cliConfig.SRC_ENDPOINT}/.api/graphql`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `token ${options.cliConfig.SRC_ACCESS_TOKEN}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
query: `query getBatchChanges($namespace:String!) {
|
||||
organization(name:$namespace) {
|
||||
batchChanges(first:99) {
|
||||
nodes { name currentSpec { originalInput } }
|
||||
}
|
||||
}
|
||||
}`,
|
||||
variables: {
|
||||
namespace: options.namespace,
|
||||
},
|
||||
}),
|
||||
})
|
||||
const {
|
||||
data: {
|
||||
organization: {
|
||||
batchChanges: { nodes: results },
|
||||
},
|
||||
},
|
||||
} = (await response.json()) as {
|
||||
data: { organization: { batchChanges: { nodes: { name: string; currentSpec: { originalInput: string } }[] } } }
|
||||
}
|
||||
const batchChange = results.find(result => result.name === options.name)
|
||||
if (!batchChange) {
|
||||
throw new Error(`Cannot find batch change ${options.name}`)
|
||||
}
|
||||
|
||||
const importChangesets = changes.map(change => ({
|
||||
repository: `github.com/${change.repository}`,
|
||||
externalIDs: [change.pullRequestNumber],
|
||||
}))
|
||||
const newSpec = YAML.parse(batchChange.currentSpec.originalInput) as BatchChangeSpec
|
||||
newSpec.importChangesets.push(...importChangesets)
|
||||
await applyBatchChange(newSpec, options)
|
||||
}
|
||||
|
||||
/**
|
||||
* Subset of batch change spec: https://sourcegraph.com/docs/batch_changes/references/batch_spec_yaml_reference
|
||||
*/
|
||||
interface BatchChangeSpec {
|
||||
name: string
|
||||
description: string
|
||||
importChangesets: { repository: string; externalIDs: number[] }[]
|
||||
}
|
||||
|
||||
async function applyBatchChange(batchChange: BatchChangeSpec, options: BatchChangeOptions): Promise<void> {
|
||||
const batchChangeYAML = YAML.stringify(batchChange)
|
||||
console.log(`Rendered batch change spec:\n\n${batchChangeYAML}`)
|
||||
|
||||
// apply the batch change
|
||||
await execa('src', ['batch', 'apply', '-namespace', options.namespace, '-f', '-'], {
|
||||
stdout: 'inherit',
|
||||
input: batchChangeYAML,
|
||||
env: options.cliConfig,
|
||||
})
|
||||
}
|
||||
@ -1,26 +0,0 @@
|
||||
export const divider = '<!-- START CHANGELOG -->'
|
||||
|
||||
export const releaseTemplate = `${divider}
|
||||
|
||||
## Unreleased
|
||||
|
||||
### Added
|
||||
|
||||
-
|
||||
|
||||
### Changed
|
||||
|
||||
-
|
||||
|
||||
### Fixed
|
||||
|
||||
-
|
||||
|
||||
### Removed
|
||||
|
||||
-`
|
||||
|
||||
export const simpleReleaseTemplate = `${divider}
|
||||
|
||||
## Unreleased
|
||||
`
|
||||
@ -1,17 +0,0 @@
|
||||
import { readFileSync } from 'fs'
|
||||
|
||||
import { load as loadYAML } from 'js-yaml'
|
||||
|
||||
export interface Metadata {
|
||||
apiVersion: string
|
||||
name: string
|
||||
description: string
|
||||
type: string
|
||||
version: string
|
||||
appVersion: string
|
||||
}
|
||||
|
||||
export function parseChartMetadata(chartYamlPath: string): Metadata {
|
||||
const chartYamlContents = readFileSync(chartYamlPath, 'utf8').toString()
|
||||
return loadYAML(chartYamlContents) as Metadata
|
||||
}
|
||||
@ -1,293 +0,0 @@
|
||||
import { readFileSync, writeFileSync } from 'fs'
|
||||
|
||||
import chalk from 'chalk'
|
||||
import { parse as parseJSONC } from 'jsonc-parser'
|
||||
import { DateTime } from 'luxon'
|
||||
import * as semver from 'semver'
|
||||
import { SemVer } from 'semver'
|
||||
|
||||
import { getPreviousVersion } from './git'
|
||||
import { retryInput } from './util'
|
||||
|
||||
const releaseConfigPath = 'release-config.jsonc'
|
||||
|
||||
/**
|
||||
* Release configuration file format
|
||||
*/
|
||||
export interface Config {
|
||||
teamEmail: string
|
||||
|
||||
captainSlackUsername: string
|
||||
captainGitHubUsername: string
|
||||
|
||||
previousRelease: string
|
||||
upcomingRelease: string
|
||||
|
||||
oneWorkingWeekBeforeRelease: string
|
||||
threeWorkingDaysBeforeRelease: string
|
||||
releaseDate: string
|
||||
oneWorkingDayAfterRelease: string
|
||||
oneWorkingWeekAfterRelease: string
|
||||
|
||||
slackAnnounceChannel: string
|
||||
|
||||
dryRun: {
|
||||
tags?: boolean
|
||||
changesets?: boolean
|
||||
trackingIssues?: boolean
|
||||
slack?: boolean
|
||||
calendar?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export async function getActiveRelease(config: ReleaseConfig): Promise<ActiveRelease> {
|
||||
if (!config.in_progress || config.in_progress.releases.length === 0) {
|
||||
console.log(chalk.yellow('No active releases are defined! Attempting to activate...'))
|
||||
await activateRelease(config)
|
||||
}
|
||||
if (!config.in_progress) {
|
||||
throw new Error('unable to activate a release!')
|
||||
}
|
||||
if (config.in_progress.releases.length > 1) {
|
||||
throw new Error(
|
||||
chalk.red(
|
||||
'The release config has multiple versions activated. This feature is not yet supported by the release tool! Please activate only a single release.'
|
||||
)
|
||||
)
|
||||
}
|
||||
const rel = config.in_progress.releases[0]
|
||||
const def = config.scheduledReleases[rel.version]
|
||||
const version = new SemVer(rel.version)
|
||||
return {
|
||||
version,
|
||||
previous: new SemVer(rel.previous),
|
||||
...(def as ReleaseDates),
|
||||
...(def as ReleaseCaptainInformation),
|
||||
branch: `${version.major}.${version.minor}`,
|
||||
srcCliVersion: config.in_progress.srcCliVersion ? new SemVer(config.in_progress.srcCliVersion) : undefined,
|
||||
}
|
||||
}
|
||||
|
||||
export function loadReleaseConfig(): ReleaseConfig {
|
||||
return parseJSONC(readFileSync(releaseConfigPath).toString()) as ReleaseConfig
|
||||
}
|
||||
|
||||
export function saveReleaseConfig(config: ReleaseConfig): void {
|
||||
writeFileSync(releaseConfigPath, JSON.stringify(config, null, 2))
|
||||
}
|
||||
|
||||
export function newRelease(
|
||||
version: SemVer,
|
||||
releaseDate: DateTime,
|
||||
captainGithub: string,
|
||||
captainSlack: string
|
||||
): ScheduledReleaseDefinition {
|
||||
return {
|
||||
...releaseDates(releaseDate, version.patch === 0),
|
||||
current: version.version,
|
||||
captainGitHubUsername: captainGithub,
|
||||
captainSlackUsername: captainSlack,
|
||||
}
|
||||
}
|
||||
|
||||
export async function newReleaseFromInput(versionOverride?: SemVer): Promise<ScheduledReleaseDefinition> {
|
||||
let version = versionOverride
|
||||
if (!version) {
|
||||
version = await selectVersionWithSuggestion('Enter the desired version number')
|
||||
}
|
||||
|
||||
const releaseDateStr = await retryInput(
|
||||
'Enter the release date (YYYY-MM-DD). Enter blank to use current date: ',
|
||||
val => {
|
||||
if (val && /^\d{4}-\d{2}-\d{2}$/.test(val)) {
|
||||
return true
|
||||
}
|
||||
// this will return false if the input doesn't match the regexp above but does exist, allowing blank input to still be valid
|
||||
return !val
|
||||
},
|
||||
'invalid date, expected format YYYY-MM-DD'
|
||||
)
|
||||
let releaseTime: DateTime
|
||||
if (!releaseDateStr) {
|
||||
releaseTime = DateTime.now().setZone('America/Los_Angeles')
|
||||
console.log(chalk.blue(`Using current time: ${releaseTime.toString()}`))
|
||||
} else {
|
||||
releaseTime = DateTime.fromISO(releaseDateStr, { zone: 'America/Los_Angeles' })
|
||||
}
|
||||
|
||||
const captainGithubUsername = await retryInput('Enter the github username of the release captain: ', val => !!val)
|
||||
const captainSlackUsername = await retryInput('Enter the slack username of the release captain: ', val => !!val)
|
||||
|
||||
const rel = newRelease(version, releaseTime, captainGithubUsername, captainSlackUsername)
|
||||
console.log(chalk.green('Version created:'))
|
||||
console.log(chalk.green(JSON.stringify(rel, null, 2)))
|
||||
return rel
|
||||
}
|
||||
|
||||
function releaseDates(releaseDate: DateTime, includePatches?: boolean): ReleaseDates {
|
||||
releaseDate = releaseDate.set({ hour: 10 })
|
||||
return {
|
||||
codeFreezeDate: releaseDate.plus({ days: -7 }).toString(),
|
||||
securityApprovalDate: releaseDate.plus({ days: -7 }).toString(),
|
||||
releaseDate: releaseDate.toString(),
|
||||
patches: includePatches
|
||||
? generatePatchDates(releaseDate, releaseDate.plus({ months: 3 }), 2).map(rdate => rdate.toString())
|
||||
: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
function generatePatchDates(start: DateTime, end: DateTime, intervalWeeks: number): DateTime[] {
|
||||
const patches = []
|
||||
let current: DateTime = start.plus({ weeks: intervalWeeks })
|
||||
while (current < end.minus({ weeks: 1 })) {
|
||||
patches.push(current)
|
||||
current = current.plus({ weeks: intervalWeeks })
|
||||
}
|
||||
return patches
|
||||
}
|
||||
|
||||
export function addScheduledRelease(config: ReleaseConfig, release: ScheduledReleaseDefinition): ReleaseConfig {
|
||||
config.scheduledReleases[release.current] = release
|
||||
return config
|
||||
}
|
||||
|
||||
export function removeScheduledRelease(config: ReleaseConfig, version: string): ReleaseConfig {
|
||||
delete config.scheduledReleases[version]
|
||||
return config
|
||||
}
|
||||
|
||||
export interface ReleaseDates {
|
||||
releaseDate: string
|
||||
codeFreezeDate: string
|
||||
securityApprovalDate: string
|
||||
patches?: string[]
|
||||
}
|
||||
|
||||
export interface ActiveRelease extends ReleaseCaptainInformation, ReleaseDates {
|
||||
version: SemVer
|
||||
previous: SemVer
|
||||
branch: string
|
||||
srcCliVersion?: SemVer
|
||||
}
|
||||
|
||||
export interface ActiveReleaseDefinition {
|
||||
version: string
|
||||
previous: string
|
||||
}
|
||||
|
||||
export interface ReleaseCaptainInformation {
|
||||
captainSlackUsername: string
|
||||
captainGitHubUsername: string
|
||||
}
|
||||
|
||||
export interface InProgress extends ReleaseCaptainInformation {
|
||||
releases: ActiveReleaseDefinition[]
|
||||
srcCliVersion?: string
|
||||
googleExecutorVersion?: string
|
||||
awsExecutorVersion?: string
|
||||
}
|
||||
|
||||
export interface ReleaseConfig {
|
||||
metadata: {
|
||||
teamEmail: string
|
||||
slackAnnounceChannel: string
|
||||
}
|
||||
scheduledReleases: {
|
||||
[version: string]: ScheduledReleaseDefinition
|
||||
}
|
||||
in_progress?: InProgress
|
||||
dryRun: {
|
||||
tags?: boolean
|
||||
changesets?: boolean
|
||||
trackingIssues?: boolean
|
||||
slack?: boolean
|
||||
calendar?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export interface ScheduledReleaseDefinition extends ReleaseDates, ReleaseCaptainInformation {
|
||||
current: string
|
||||
}
|
||||
|
||||
// Prompt a user for input and activate the given release version if possible. Will redirect to release creation input if
|
||||
// the version isn't defined.
|
||||
export async function activateRelease(config: ReleaseConfig): Promise<void> {
|
||||
const next = await selectVersionWithSuggestion('Enter the version to activate')
|
||||
console.log('Attempting to detect previous version...')
|
||||
const previous = getPreviousVersion(next)
|
||||
console.log(chalk.blue(`Detected previous version: ${previous.version}`))
|
||||
|
||||
const scheduled = await getScheduledReleaseWithInput(config, next)
|
||||
config.in_progress = {
|
||||
captainGitHubUsername: scheduled.captainGitHubUsername,
|
||||
captainSlackUsername: scheduled.captainSlackUsername,
|
||||
releases: [{ version: next.version, previous: previous.version }],
|
||||
}
|
||||
saveReleaseConfig(config)
|
||||
console.log(chalk.green(`Release: ${next.version} activated!`))
|
||||
}
|
||||
|
||||
export function deactivateAllReleases(config: ReleaseConfig): void {
|
||||
delete config.in_progress
|
||||
saveReleaseConfig(config)
|
||||
}
|
||||
|
||||
// Prompt a user for a major / minor version input with automation suggestion by adding a minor version to the previous version.
|
||||
async function selectVersionWithSuggestion(prompt: string): Promise<SemVer> {
|
||||
const probablyMinor = getPreviousVersion().inc('minor')
|
||||
const probablyPatch = getPreviousVersion().inc('patch')
|
||||
const input = await retryInput(
|
||||
`Next minor release: ${probablyMinor.version}\nNext patch release: ${probablyPatch.version}\n${chalk.blue(
|
||||
prompt
|
||||
)}: `,
|
||||
val => {
|
||||
const version = semver.parse(val)
|
||||
return !!version
|
||||
}
|
||||
)
|
||||
return new SemVer(input)
|
||||
}
|
||||
|
||||
// Prompt a user for a release definition input, and redirect to creation input if it doesn't exist.
|
||||
export async function getReleaseDefinition(config: ReleaseConfig): Promise<ScheduledReleaseDefinition> {
|
||||
const next = await selectVersionWithSuggestion('Enter the version number to select')
|
||||
return getScheduledReleaseWithInput(config, next)
|
||||
}
|
||||
|
||||
// Helper function to get a release definition from the release config, redirecting to creation input if it doesn't exist.
|
||||
async function getScheduledReleaseWithInput(
|
||||
config: ReleaseConfig,
|
||||
releaseVersion: SemVer
|
||||
): Promise<ScheduledReleaseDefinition> {
|
||||
let scheduled = config.scheduledReleases[releaseVersion.version]
|
||||
if (!scheduled) {
|
||||
console.log(
|
||||
chalk.yellow(`Release definition not found for: ${releaseVersion.version}, enter release information.\n`)
|
||||
)
|
||||
scheduled = await newReleaseFromInput(releaseVersion)
|
||||
addScheduledRelease(config, scheduled)
|
||||
saveReleaseConfig(config)
|
||||
}
|
||||
return scheduled
|
||||
}
|
||||
|
||||
export function setSrcCliVersion(config: ReleaseConfig, version: string): void {
|
||||
if (config.in_progress) {
|
||||
config.in_progress.srcCliVersion = version
|
||||
}
|
||||
saveReleaseConfig(config)
|
||||
}
|
||||
|
||||
export function setGoogleExecutorVersion(config: ReleaseConfig, version: string): void {
|
||||
if (config.in_progress) {
|
||||
config.in_progress.googleExecutorVersion = version
|
||||
}
|
||||
saveReleaseConfig(config)
|
||||
}
|
||||
|
||||
export function setAWSExecutorVersion(config: ReleaseConfig, version: string): void {
|
||||
if (config.in_progress) {
|
||||
config.in_progress.awsExecutorVersion = version
|
||||
}
|
||||
saveReleaseConfig(config)
|
||||
}
|
||||
@ -1,64 +0,0 @@
|
||||
import execa from 'execa'
|
||||
import { SemVer } from 'semver'
|
||||
import * as semver from 'semver'
|
||||
|
||||
import { localSourcegraphRepo } from './github'
|
||||
|
||||
export function getTags(workdir: string, prefix?: string): string[] {
|
||||
execa.sync('git', ['fetch', '--tags'], { cwd: workdir })
|
||||
return execa
|
||||
.sync('git', ['--no-pager', 'tag', '-l', `${prefix}`, '--sort=v:refname'], { cwd: workdir })
|
||||
.stdout.split('\n')
|
||||
}
|
||||
|
||||
export function getCandidateTags(workdir: string, version: string): string[] {
|
||||
return getTags(workdir, `v${version}-rc*`)
|
||||
}
|
||||
|
||||
export function getReleaseTags(workdir: string, prefix: string): string[] {
|
||||
const raw = getTags(workdir, prefix)
|
||||
// since tags are globbed they can overmatch when we just want pure release tags
|
||||
return raw.filter(tag => tag.match('[0-9]+\\.[0-9]+\\.[0-9]+$'))
|
||||
}
|
||||
|
||||
const mainRepoTagPrefix = 'v[0-9]*.[0-9]*.[0-9]*'
|
||||
export const srcCliTagPrefix = '[0-9]*.[0-9]*.[0-9]*'
|
||||
export const executorTagPrefix = 'v[0-9]*.[0-9]*.[0-9]*'
|
||||
|
||||
// Returns the version tagged in the repository previous to a provided input version. If no input version it will
|
||||
// simply return the highest version found in the repository.
|
||||
export function getPreviousVersion(
|
||||
version?: SemVer,
|
||||
prefix: string = mainRepoTagPrefix,
|
||||
repoDir: string = localSourcegraphRepo
|
||||
): SemVer {
|
||||
const lowest = new SemVer('0.0.1')
|
||||
const tags = getReleaseTags(repoDir, prefix)
|
||||
if (tags.length === 0) {
|
||||
return lowest
|
||||
}
|
||||
if (!version) {
|
||||
return new SemVer(tags.at(-1))
|
||||
}
|
||||
|
||||
for (
|
||||
let reallyLongVariableNameBecauseESLintRulesAreSilly = tags.length - 1;
|
||||
reallyLongVariableNameBecauseESLintRulesAreSilly >= 0;
|
||||
reallyLongVariableNameBecauseESLintRulesAreSilly--
|
||||
) {
|
||||
const tag = tags[reallyLongVariableNameBecauseESLintRulesAreSilly]
|
||||
const temp = semver.parse(tag)
|
||||
if (temp && temp.compare(version) === -1) {
|
||||
return temp
|
||||
}
|
||||
}
|
||||
return lowest
|
||||
}
|
||||
|
||||
export function getPreviousVersionSrcCli(path: string): SemVer {
|
||||
return getPreviousVersion(undefined, srcCliTagPrefix, path)
|
||||
}
|
||||
|
||||
export function getPreviousVersionExecutor(path: string): SemVer {
|
||||
return getPreviousVersion(undefined, executorTagPrefix, path)
|
||||
}
|
||||
@ -1,747 +0,0 @@
|
||||
import { existsSync, mkdtemp as original_mkdtemp, readFileSync, copyFileSync } from 'fs'
|
||||
import * as os from 'os'
|
||||
import * as path from 'path'
|
||||
import { promisify } from 'util'
|
||||
|
||||
import Octokit, { type IssuesAddLabelsParams } from '@octokit/rest'
|
||||
import commandExists from 'command-exists'
|
||||
import execa from 'execa'
|
||||
import fetch from 'node-fetch'
|
||||
import * as semver from 'semver'
|
||||
|
||||
import type { ActiveRelease } from './config'
|
||||
import { cacheFolder, changelogURL, formatDate, getContainerRegistryCredential, readLine, timezoneLink } from './util'
|
||||
|
||||
const mkdtemp = promisify(original_mkdtemp)
|
||||
let githubPAT: string
|
||||
|
||||
export async function getAuthenticatedGitHubClient(): Promise<Octokit> {
|
||||
const cacheFile = `${cacheFolder}/github.txt`
|
||||
if (existsSync(cacheFile) && (await validateToken()) === true) {
|
||||
githubPAT = readFileSync(`${cacheFolder}/github.txt`, 'utf-8')
|
||||
} else {
|
||||
githubPAT = await readLine(
|
||||
'Enter a GitHub personal access token with "repo" scope (https://github.com/settings/tokens/new): ',
|
||||
cacheFile
|
||||
)
|
||||
}
|
||||
|
||||
const trimmedGithubPAT = githubPAT.trim()
|
||||
return new Octokit({ auth: trimmedGithubPAT })
|
||||
}
|
||||
|
||||
/**
|
||||
* releaseName generates a standardized format for referring to releases.
|
||||
*/
|
||||
export function releaseName(release: semver.SemVer): string {
|
||||
return `${release.major}.${release.minor}${release.patch !== 0 ? `.${release.patch}` : ''}`
|
||||
}
|
||||
|
||||
export enum IssueLabel {
|
||||
// https://github.com/sourcegraph/sourcegraph/labels/release-tracking
|
||||
RELEASE_TRACKING = 'release-tracking',
|
||||
// https://github.com/sourcegraph/sourcegraph/labels/patch-release-request
|
||||
PATCH_REQUEST = 'patch-release-request',
|
||||
|
||||
// New labels to better distinguish release-tracking issues
|
||||
RELEASE = 'release',
|
||||
PATCH = 'patch',
|
||||
MANAGED = 'managed-instances',
|
||||
DEVOPS_TEAM = 'team/devops',
|
||||
SECURITY_TEAM = 'team/security',
|
||||
RELEASE_BLOCKER = 'release-blocker',
|
||||
}
|
||||
|
||||
enum IssueTitleSuffix {
|
||||
RELEASE_TRACKING = 'release tracking issue',
|
||||
PATCH_TRACKING = 'patch release tracking issue',
|
||||
MANAGED_TRACKING = 'upgrade managed instances tracking issue',
|
||||
SECURITY_TRACKING = 'container image vulnerability assessment tracking issue',
|
||||
}
|
||||
|
||||
/**
|
||||
* Template used to generate tracking issue
|
||||
*/
|
||||
interface IssueTemplate {
|
||||
owner: string
|
||||
repo: string
|
||||
/**
|
||||
* Relative path to markdown file containing template body.
|
||||
*
|
||||
* Template bodies can leverage arguments as described in `IssueTemplateArguments` docstrings.
|
||||
*/
|
||||
path: string
|
||||
/**
|
||||
* Title for issue.
|
||||
*/
|
||||
titleSuffix: IssueTitleSuffix
|
||||
/**
|
||||
* Labels to apply on issues.
|
||||
*/
|
||||
labels: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Arguments available for rendering IssueTemplate
|
||||
*/
|
||||
interface IssueTemplateArguments {
|
||||
/**
|
||||
* Available as `$MAJOR`, `$MINOR`, and `$PATCH`
|
||||
*/
|
||||
version: semver.SemVer
|
||||
/**
|
||||
* Available as `$SECURITY_REVIEW_DATE`
|
||||
*/
|
||||
securityReviewDate: Date
|
||||
/**
|
||||
* Available as `$CODE_FREEZE_DATE`
|
||||
*/
|
||||
codeFreezeDate: Date
|
||||
/**
|
||||
* Available as `$RELEASE_DATE`
|
||||
*/
|
||||
releaseDate: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure templates for the release tool to generate issues with.
|
||||
*
|
||||
* Ensure these templates are up to date with the state of the tooling and release processes.
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
|
||||
const getTemplates = () => {
|
||||
const releaseIssue: IssueTemplate = {
|
||||
owner: 'sourcegraph',
|
||||
repo: 'sourcegraph',
|
||||
path: 'dev/release/templates/release_issue_template.md',
|
||||
titleSuffix: IssueTitleSuffix.RELEASE_TRACKING,
|
||||
labels: [IssueLabel.RELEASE_TRACKING, IssueLabel.RELEASE],
|
||||
}
|
||||
const patchReleaseIssue: IssueTemplate = {
|
||||
owner: 'sourcegraph',
|
||||
repo: 'sourcegraph',
|
||||
path: 'dev/release/templates/patch_release_issue_template.md',
|
||||
titleSuffix: IssueTitleSuffix.PATCH_TRACKING,
|
||||
labels: [IssueLabel.RELEASE_TRACKING, IssueLabel.PATCH],
|
||||
}
|
||||
const securityAssessmentIssue: IssueTemplate = {
|
||||
owner: 'sourcegraph',
|
||||
repo: 'sourcegraph',
|
||||
path: 'dev/release/templates/security_assessment.md',
|
||||
titleSuffix: IssueTitleSuffix.SECURITY_TRACKING,
|
||||
labels: [IssueLabel.RELEASE_TRACKING, IssueLabel.SECURITY_TEAM, IssueLabel.RELEASE_BLOCKER],
|
||||
}
|
||||
return { releaseIssue, patchReleaseIssue, securityAssessmentIssue }
|
||||
}
|
||||
|
||||
function dateMarkdown(date: Date, name: string): string {
|
||||
return `[${formatDate(date)}](${timezoneLink(date, name)})`
|
||||
}
|
||||
|
||||
async function execTemplate(
|
||||
octokit: Octokit,
|
||||
template: IssueTemplate,
|
||||
{ version, securityReviewDate, codeFreezeDate, releaseDate }: IssueTemplateArguments
|
||||
): Promise<string> {
|
||||
console.log(`Preparing issue from ${JSON.stringify(template)}`)
|
||||
const name = releaseName(version)
|
||||
const content = await getContent(octokit, template)
|
||||
return content
|
||||
.replaceAll('$MAJOR', version.major.toString())
|
||||
.replaceAll('$MINOR', version.minor.toString())
|
||||
.replaceAll('$PATCH', version.patch.toString())
|
||||
.replaceAll(
|
||||
'$SECURITY_REVIEW_DATE',
|
||||
dateMarkdown(securityReviewDate, `One working week before ${name} release`)
|
||||
)
|
||||
.replaceAll('$CODE_FREEZE_DATE', dateMarkdown(codeFreezeDate, `Three working days before ${name} release`))
|
||||
.replaceAll('$RELEASE_DATE', dateMarkdown(releaseDate, `${name} release date`))
|
||||
}
|
||||
|
||||
interface MaybeIssue {
|
||||
title: string
|
||||
url: string
|
||||
number: number
|
||||
created: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures tracking issues for the given release.
|
||||
*
|
||||
* The first returned issue is considered the parent issue.
|
||||
*/
|
||||
export async function ensureTrackingIssues({
|
||||
version,
|
||||
assignees,
|
||||
releaseDate,
|
||||
securityReviewDate,
|
||||
codeFreezeDate,
|
||||
dryRun,
|
||||
}: {
|
||||
version: semver.SemVer
|
||||
assignees: string[]
|
||||
releaseDate: Date
|
||||
securityReviewDate: Date
|
||||
codeFreezeDate: Date
|
||||
dryRun: boolean
|
||||
}): Promise<MaybeIssue[]> {
|
||||
const octokit = await getAuthenticatedGitHubClient()
|
||||
const templates = getTemplates()
|
||||
const release = releaseName(version)
|
||||
|
||||
// Determine what issues to generate. The first issue is considered the "main"
|
||||
// tracking issue, and subsequent issues will contain references to it.
|
||||
let issueTemplates: IssueTemplate[]
|
||||
if (version.patch === 0) {
|
||||
issueTemplates = [templates.releaseIssue]
|
||||
} else {
|
||||
issueTemplates = [templates.patchReleaseIssue]
|
||||
}
|
||||
|
||||
// Release milestones are not as emphasised now as they used to be, since most teams
|
||||
// use sprints shorter than releases to track their work. For reference, if one is
|
||||
// available we apply it to this tracking issue, otherwise just leave it without a
|
||||
// milestone.
|
||||
let milestoneNumber: number | undefined
|
||||
const milestone = await getReleaseMilestone(octokit, version)
|
||||
if (!milestone) {
|
||||
console.log(`Milestone ${release} is closed or not found — omitting from issue.`)
|
||||
} else {
|
||||
milestoneNumber = milestone ? milestone.number : undefined
|
||||
}
|
||||
|
||||
// Create issues
|
||||
let parentIssue: MaybeIssue | undefined
|
||||
const created: MaybeIssue[] = []
|
||||
for (const template of issueTemplates) {
|
||||
const body = await execTemplate(octokit, template, {
|
||||
version,
|
||||
releaseDate,
|
||||
securityReviewDate,
|
||||
codeFreezeDate,
|
||||
})
|
||||
const issue = await ensureIssue(
|
||||
octokit,
|
||||
{
|
||||
title: trackingIssueTitle(version, template),
|
||||
labels: template.labels,
|
||||
body: parentIssue ? `${body}\n\n---\n\nAlso see [${parentIssue.title}](${parentIssue.url})` : body,
|
||||
assignees,
|
||||
owner: 'sourcegraph',
|
||||
repo: 'sourcegraph',
|
||||
milestone: milestoneNumber,
|
||||
},
|
||||
dryRun
|
||||
)
|
||||
// if this is the first issue, we treat it as the parent issue
|
||||
if (!parentIssue) {
|
||||
parentIssue = { ...issue }
|
||||
}
|
||||
created.push({ ...issue })
|
||||
}
|
||||
return created
|
||||
}
|
||||
|
||||
async function getContent(
|
||||
octokit: Octokit,
|
||||
parameters: {
|
||||
owner: string
|
||||
repo: string
|
||||
path: string
|
||||
}
|
||||
): Promise<string> {
|
||||
const response = await octokit.repos.getContents(parameters)
|
||||
if (Array.isArray(response.data)) {
|
||||
throw new TypeError(`${parameters.path} is a directory`)
|
||||
}
|
||||
return Buffer.from(response.data.content as string, 'base64').toString()
|
||||
}
|
||||
|
||||
async function ensureIssue(
|
||||
octokit: Octokit,
|
||||
{
|
||||
title,
|
||||
owner,
|
||||
repo,
|
||||
assignees,
|
||||
body,
|
||||
milestone,
|
||||
labels,
|
||||
}: {
|
||||
title: string
|
||||
owner: string
|
||||
repo: string
|
||||
assignees: string[]
|
||||
body: string
|
||||
milestone?: number
|
||||
labels: string[]
|
||||
},
|
||||
dryRun: boolean
|
||||
): Promise<MaybeIssue> {
|
||||
const issueData = {
|
||||
title,
|
||||
owner,
|
||||
repo,
|
||||
assignees,
|
||||
milestone,
|
||||
labels,
|
||||
}
|
||||
const issue = await getIssueByTitle(octokit, title, labels)
|
||||
if (issue) {
|
||||
return { title, url: issue.url, number: issue.number, created: false }
|
||||
}
|
||||
if (dryRun) {
|
||||
console.log('Dry run enabled, skipping issue creation')
|
||||
console.log(`Issue that would have been created:\n${JSON.stringify(issueData, null, 1)}`)
|
||||
console.log(`With body: ${body}`)
|
||||
return { title, url: '', number: 0, created: false }
|
||||
}
|
||||
const createdIssue = await octokit.issues.create({ body, ...issueData })
|
||||
return { title, url: createdIssue.data.html_url, number: createdIssue.data.number, created: true }
|
||||
}
|
||||
|
||||
export async function listIssues(
|
||||
octokit: Octokit,
|
||||
query: string
|
||||
): Promise<Octokit.SearchIssuesAndPullRequestsResponseItemsItem[]> {
|
||||
return (await octokit.search.issuesAndPullRequests({ per_page: 100, q: query })).data.items
|
||||
}
|
||||
|
||||
/** A GitHub issue together with the repository it lives in. */
export interface Issue {
    title: string
    number: number
    url: string

    // Repository
    owner: string
    repo: string
}
|
||||
|
||||
export async function getTrackingIssue(client: Octokit, release: semver.SemVer): Promise<Issue | null> {
|
||||
const templates = getTemplates()
|
||||
const template = release.patch ? templates.patchReleaseIssue : templates.releaseIssue
|
||||
return getIssueByTitle(client, trackingIssueTitle(release, template), template.labels)
|
||||
}
|
||||
|
||||
function trackingIssueTitle(release: semver.SemVer, template: IssueTemplate): string {
|
||||
return `${release.version} ${template.titleSuffix}`
|
||||
}
|
||||
|
||||
export async function commentOnIssue(client: Octokit, issue: Issue, body: string): Promise<string> {
|
||||
const comment = await client.issues.createComment({
|
||||
body,
|
||||
issue_number: issue.number,
|
||||
owner: issue.owner,
|
||||
repo: issue.repo,
|
||||
})
|
||||
return comment.data.html_url
|
||||
}
|
||||
|
||||
async function closeIssue(client: Octokit, issue: Issue): Promise<void> {
|
||||
await client.issues.update({
|
||||
state: 'closed',
|
||||
issue_number: issue.number,
|
||||
owner: issue.owner,
|
||||
repo: issue.repo,
|
||||
})
|
||||
}
|
||||
|
||||
/** A GitHub milestone together with the repository it belongs to. */
interface Milestone {
    number: number
    url: string

    // Repository
    owner: string
    repo: string
}
|
||||
|
||||
async function getReleaseMilestone(client: Octokit, release: semver.SemVer): Promise<Milestone | null> {
|
||||
const owner = 'sourcegraph'
|
||||
const repo = 'sourcegraph'
|
||||
const milestoneTitle = releaseName(release)
|
||||
const milestones = await client.issues.listMilestonesForRepo({
|
||||
owner,
|
||||
repo,
|
||||
per_page: 100,
|
||||
direction: 'desc',
|
||||
})
|
||||
const milestone = milestones.data.filter(milestone => milestone.title === milestoneTitle)
|
||||
return milestone.length > 0
|
||||
? {
|
||||
number: milestone[0].number,
|
||||
url: milestone[0].html_url,
|
||||
owner,
|
||||
repo,
|
||||
}
|
||||
: null
|
||||
}
|
||||
|
||||
export async function queryIssues(octokit: Octokit, titleQuery: string, labels: string[]): Promise<Issue[]> {
|
||||
const owner = 'sourcegraph'
|
||||
const repo = 'sourcegraph'
|
||||
const response = await octokit.search.issuesAndPullRequests({
|
||||
per_page: 100,
|
||||
q: `type:issue repo:${owner}/${repo} is:open ${labels
|
||||
.map(label => `label:${label}`)
|
||||
.join(' ')} ${JSON.stringify(titleQuery)}`,
|
||||
})
|
||||
return response.data.items.map(item => ({
|
||||
title: item.title,
|
||||
number: item.number,
|
||||
url: item.html_url,
|
||||
owner,
|
||||
repo,
|
||||
}))
|
||||
}
|
||||
|
||||
async function getIssueByTitle(octokit: Octokit, title: string, labels: string[]): Promise<Issue | null> {
|
||||
const matchingIssues = (await queryIssues(octokit, title, labels)).filter(issue => issue.title === title)
|
||||
if (matchingIssues.length === 0) {
|
||||
return null
|
||||
}
|
||||
if (matchingIssues.length > 1) {
|
||||
throw new Error(`Multiple issues matched issue title ${JSON.stringify(title)}`)
|
||||
}
|
||||
return matchingIssues[0]
|
||||
}
|
||||
|
||||
/** An edit implemented in code: receives the clone's working directory to mutate. */
export type EditFunc = (d: string) => void

/** An edit is either a bash command (string) or an EditFunc. */
export type Edit = string | EditFunc

/** Options for creating a branch with generated changes in a repository. */
export interface CreateBranchWithChangesOptions {
    owner: string
    repo: string
    // Branch the changes are based on.
    base: string
    // Branch the generated changes are force-pushed to.
    head: string
    commitMessage: string
    edits: Edit[]
    dryRun?: boolean
}

/** Options for generating and publishing a batch of changes as pull requests. */
export interface ChangesetsOptions {
    // Commands that must exist on PATH before any edits run.
    requiredCommands: string[]
    changes: (Octokit.PullsCreateParams & CreateBranchWithChangesOptions & { labels?: string[] })[]
    dryRun?: boolean
}

/** Result of publishing one changeset as a pull request. */
export interface CreatedChangeset {
    repository: string
    branch: string
    pullRequestURL: string
    pullRequestNumber: number
}
|
||||
|
||||
export async function createChangesets(options: ChangesetsOptions): Promise<CreatedChangeset[]> {
|
||||
// Overwriting `process.env` may not be a good practice,
|
||||
// but it's the easiest way to avoid making changes all over the place
|
||||
const dockerHubCredential = await getContainerRegistryCredential('index.docker.io')
|
||||
process.env.CR_USERNAME = dockerHubCredential.username
|
||||
process.env.CR_PASSWORD = dockerHubCredential.password
|
||||
for (const command of options.requiredCommands) {
|
||||
try {
|
||||
await commandExists(command)
|
||||
} catch {
|
||||
throw new Error(`Required command ${command} does not exist`)
|
||||
}
|
||||
}
|
||||
const octokit = await getAuthenticatedGitHubClient()
|
||||
if (options.dryRun) {
|
||||
console.log('Changesets dry run enabled - diffs and pull requests will be printed instead')
|
||||
} else {
|
||||
console.log('Generating changes and publishing as pull requests')
|
||||
}
|
||||
|
||||
// Generate and push changes. We abort here if a repo fails because it should be safe
|
||||
// to re-run changesets, which force push changes to a PR branch.
|
||||
for (const change of options.changes) {
|
||||
const repository = `${change.owner}/${change.repo}`
|
||||
console.log(`${repository}: Preparing change for on '${change.base}' to '${change.head}'`)
|
||||
await createBranchWithChanges(octokit, { ...change, dryRun: options.dryRun })
|
||||
}
|
||||
|
||||
// Publish changes as pull requests only if all changes are successfully created. We
|
||||
// continue on error for errors when publishing.
|
||||
const results: CreatedChangeset[] = []
|
||||
let publishChangesFailed = false
|
||||
for (const change of options.changes) {
|
||||
const repository = `${change.owner}/${change.repo}`
|
||||
console.log(`${repository}: Preparing pull request for change from '${change.base}' to '${change.head}':
|
||||
|
||||
Title: ${change.title}
|
||||
Body: ${change.body || 'none'}`)
|
||||
let pullRequest: { url: string; number: number } = { url: '', number: -1 }
|
||||
try {
|
||||
if (!options.dryRun) {
|
||||
pullRequest = await createPR(octokit, change)
|
||||
if (change.labels) {
|
||||
await octokit.issues.addLabels({
|
||||
issue_number: pullRequest.number,
|
||||
repo: change.repo,
|
||||
owner: change.owner,
|
||||
labels: change.labels,
|
||||
} as IssuesAddLabelsParams)
|
||||
}
|
||||
}
|
||||
|
||||
results.push({
|
||||
repository,
|
||||
branch: change.base,
|
||||
pullRequestURL: pullRequest.url,
|
||||
pullRequestNumber: pullRequest.number,
|
||||
})
|
||||
} catch (error) {
|
||||
publishChangesFailed = true
|
||||
console.error(error)
|
||||
console.error(`Failed to create pull request for change in ${repository}`, { change })
|
||||
}
|
||||
}
|
||||
|
||||
// Log results
|
||||
for (const result of results) {
|
||||
console.log(`${result.repository} (${result.branch}): created pull request ${result.pullRequestURL}`)
|
||||
}
|
||||
if (publishChangesFailed) {
|
||||
throw new Error('Error occured applying some changes - please check log output')
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
 * Clones the given repository into a fresh temporary directory (shallow clone,
 * using the neighbouring local clone as an object reference when available)
 * and checks out the requested revision. When the revision does not exist on
 * the remote and `revisionMustExist` is unset, the branch is created from HEAD
 * and pushed.
 *
 * Returns the working directory of the checked-out clone.
 */
export async function cloneRepo(
    octokit: Octokit,
    owner: string,
    repo: string,
    checkout: {
        revision: string
        // When true, skip the existence probe and assume the revision exists.
        revisionMustExist?: boolean
    }
): Promise<{
    workdir: string
}> {
    const tmpdir = await mkdtemp(path.join(os.tmpdir(), `sg-release-${owner}-${repo}-`))
    console.log(`Created temp directory ${tmpdir}`)
    const fetchFlags = '--depth 1'

    // Determine whether or not to create the base branch, or use the existing one
    let revisionExists = true
    if (!checkout.revisionMustExist) {
        try {
            await octokit.repos.getBranch({ branch: checkout.revision, owner, repo })
        } catch (error) {
            // 404 means the branch is missing and should be created; anything
            // else is a real API failure.
            if (error.status === 404) {
                console.log(`Target revision ${checkout.revision} does not exist, this branch will be created`)
                revisionExists = false
            } else {
                throw error
            }
        }
    }
    const checkoutCommand =
        revisionExists === true
            ? // for an existing branch - fetch fails if we are already checked out, so ignore errors optimistically
              `git fetch ${fetchFlags} origin ${checkout.revision}:${checkout.revision} || true ; git checkout ${checkout.revision}`
            : // create from HEAD and publish base branch if it does not yet exist
              `git checkout -b ${checkout.revision} ; git push origin ${checkout.revision}:${checkout.revision}`

    // PERF: if we have a local clone using reference avoids needing to fetch
    // all the objects from the remote. We assume the local clone will exist
    // in the same directory as the current sourcegraph/sourcegraph clone.
    const cloneFlags = `${fetchFlags} --reference-if-able ${localSourcegraphRepo}/../${repo}`

    // Set up repository: prefer SSH, fall back to HTTPS.
    const setupScript = `set -ex

git clone ${cloneFlags} git@github.com:${owner}/${repo} || git clone ${cloneFlags} https://github.com/${owner}/${repo};
cd ${repo};
${checkoutCommand};`
    await execa('bash', ['-c', setupScript], { stdio: 'inherit', cwd: tmpdir })
    return {
        workdir: path.join(tmpdir, repo),
    }
}
|
||||
|
||||
// Path to the local sourcegraph/sourcegraph clone — assumes the release tool
// runs two directories below the repository root (TODO confirm).
export const localSourcegraphRepo = `${process.cwd()}/../..`
|
||||
|
||||
/**
 * Clones `owner/repo` at `base`, applies the given edits, and force-pushes the
 * result to the `head` branch — or, when `dryRun` is set, prints the resulting
 * diff instead of committing and pushing.
 */
async function createBranchWithChanges(
    octokit: Octokit,
    { owner, repo, base: baseRevision, head: headBranch, commitMessage, edits, dryRun }: CreateBranchWithChangesOptions
): Promise<void> {
    // Set up repository
    const { workdir } = await cloneRepo(octokit, owner, repo, { revision: baseRevision })

    // Bazel depends on configuration in the sourcegraph repo. So to run it in
    // our temporary clone we need to copy those files over.
    if (owner === 'sourcegraph' && repo === 'sourcegraph') {
        // All the try-import files from .bazelrc
        for (const name of ['.aspect/bazelrc/user.bazelrc', 'user.bazelrc', '.bazelrc-nix']) {
            const src = `${localSourcegraphRepo}/${name}`
            const dest = `${workdir}/${name}`
            if (existsSync(src)) {
                copyFileSync(src, dest)
            }
        }
    }

    // Apply edits
    for (const edit of edits) {
        switch (typeof edit) {
            case 'function': {
                // EditFunc edits receive the clone's working directory.
                edit(workdir)
                break
            }
            case 'string': {
                // String edits are run as bash scripts inside the clone.
                // NOTE(review): there is no `break` here, but as the last case
                // control falls out of the switch either way.
                const editScript = `set -ex

${edit};`
                await execa('bash', ['-c', editScript], { stdio: 'inherit', cwd: workdir })
            }
        }
    }

    if (dryRun) {
        console.warn('Dry run enabled - printing diff instead of publishing')
        const showChangesScript = `set -ex

git --no-pager diff;`
        await execa('bash', ['-c', showChangesScript], { stdio: 'inherit', cwd: workdir })
    } else {
        // Publish changes. We force push to ensure that the generated changes are applied.
        const publishScript = `set -ex

git add :/;
git commit -a -m ${JSON.stringify(commitMessage)};
git push --force origin HEAD:${headBranch};`
        await execa('bash', ['-c', publishScript], { stdio: 'inherit', cwd: workdir })
    }
}
|
||||
|
||||
async function createPR(
|
||||
octokit: Octokit,
|
||||
options: {
|
||||
owner: string
|
||||
repo: string
|
||||
head: string
|
||||
base: string
|
||||
title: string
|
||||
body?: string
|
||||
}
|
||||
): Promise<{ url: string; number: number }> {
|
||||
const response = await octokit.pulls.create(options)
|
||||
return {
|
||||
url: response.data.html_url,
|
||||
number: response.data.number,
|
||||
}
|
||||
}
|
||||
|
||||
/** Options for creating a tag on a repository branch. */
export interface TagOptions {
    owner: string
    repo: string
    // Branch the tag is created on; must exist on the remote.
    branch: string
    tag: string
}
|
||||
|
||||
/**
|
||||
* Creates a tag on a remote branch for the given repository.
|
||||
*
|
||||
* The target branch must exist on the remote.
|
||||
*/
|
||||
export async function createTag(
|
||||
octokit: Octokit,
|
||||
workdir: string,
|
||||
{ owner, repo, branch: rawBranch, tag: rawTag }: TagOptions,
|
||||
dryRun: boolean
|
||||
): Promise<void> {
|
||||
const branch = JSON.stringify(rawBranch)
|
||||
const tag = JSON.stringify(rawTag)
|
||||
const finalizeTag = dryRun ? `git --no-pager show ${tag} --no-patch` : `git push origin ${tag}`
|
||||
if (dryRun) {
|
||||
console.log(`Dry-run enabled - creating and printing tag ${tag} on ${owner}/${repo}@${branch}`)
|
||||
return
|
||||
}
|
||||
console.log(`Creating and pushing tag ${tag} on ${owner}/${repo}@${branch}`)
|
||||
await execa('bash', ['-c', `git tag -a ${tag} -m ${tag} && ${finalizeTag}`], { stdio: 'inherit', cwd: workdir })
|
||||
}
|
||||
|
||||
// createLatestRelease generates a GitHub release iff this release is the latest and
// greatest, otherwise it is a no-op.
//
// Returns the HTML URL of the created release, or '' when skipped (an existing
// release is newer, or dry run).
export async function createLatestRelease(
    octokit: Octokit,
    { owner, repo, release }: { owner: string; repo: string; release: semver.SemVer },
    dryRun?: boolean
): Promise<string> {
    const latest = await octokit.repos.getLatestRelease({
        owner,
        repo,
    })
    const latestTag = latest.data.tag_name
    // Release tags may carry a leading 'v'; strip it before comparing versions.
    if (semver.gt(latestTag.startsWith('v') ? latestTag.slice(1) : latestTag, release)) {
        // if latest is greater than release, do not generate a release
        console.log(`Latest release ${latestTag} is more recent than ${release.version}, skipping GitHub release`)
        return ''
    }

    const updateURL = 'https://sourcegraph.com/docs/admin/updates'
    const releasePostURL = `https://sourcegraph.com/blog/release/${release.major}.${release.minor}` // CI:URL_OK

    const request: Octokit.RequestOptions & Octokit.ReposCreateReleaseParams = {
        owner,
        repo,
        tag_name: `v${release.version}`,
        name: `Sourcegraph ${release.version}`,
        prerelease: false,
        draft: false,
        body: `Sourcegraph ${release.version} is now available!

- [Changelog](${changelogURL(release.format())})
- [Update](${updateURL})
- [Release post](${releasePostURL}) (might not be available immediately upon release)
`,
    }
    if (dryRun) {
        console.log('Skipping GitHub release, parameters:', request)
        return ''
    }
    const response = await octokit.repos.createRelease(request)
    return response.data.html_url
}
|
||||
|
||||
async function validateToken(): Promise<boolean> {
|
||||
const githubPAT: string = readFileSync(`${cacheFolder}/github.txt`, 'utf-8')
|
||||
const trimmedGithubPAT = githubPAT.trim()
|
||||
const response = await fetch('https://api.github.com/repos/sourcegraph/sourcegraph', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `token ${trimmedGithubPAT}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (response.status !== 200) {
|
||||
console.log(`Existing GitHub token is invalid, got status ${response.statusText}`)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
export async function closeTrackingIssue(version: semver.SemVer): Promise<void> {
|
||||
const octokit = await getAuthenticatedGitHubClient()
|
||||
const release = releaseName(version)
|
||||
const labels = [IssueLabel.RELEASE_TRACKING, IssueLabel.RELEASE]
|
||||
// close old tracking issue
|
||||
const previous = await queryIssues(octokit, release, labels)
|
||||
for (const previousIssue of previous) {
|
||||
const comment = await commentOnIssue(
|
||||
octokit,
|
||||
previousIssue,
|
||||
`Issue closed by release tool. #${previousIssue.number}`
|
||||
)
|
||||
console.log(`Closing #${previousIssue.number} '${previousIssue.title} with ${comment}`)
|
||||
await closeIssue(octokit, previousIssue)
|
||||
}
|
||||
}
|
||||
|
||||
/** Label applied to issues/PRs that must be resolved before the release ships. */
export const releaseBlockerLabel = 'release-blocker'
|
||||
|
||||
export function getBackportLabelForRelease(release: ActiveRelease): string {
|
||||
return `backport ${release.branch}`
|
||||
}
|
||||
@ -1,170 +0,0 @@
|
||||
import { createServer, type IncomingMessage, type Server, type ServerResponse } from 'http'
|
||||
import type { AddressInfo } from 'net'
|
||||
|
||||
import { addMinutes } from 'date-fns'
|
||||
import type { Credentials } from 'google-auth-library'
|
||||
import { google, type calendar_v3 } from 'googleapis'
|
||||
import { OAuth2Client } from 'googleapis-common'
|
||||
import { DateTime } from 'luxon'
|
||||
import { readFile, writeFile } from 'mz/fs'
|
||||
import open from 'open'
|
||||
|
||||
import { readLine, cacheFolder } from './util'
|
||||
|
||||
/** Shape of the "installed" section of a Google OAuth client credentials JSON. */
export interface Installed {
    client_id?: string
    client_secret?: string
    redirect_uri?: string
}
/** Top-level shape of the Google OAuth client credentials file. */
export interface OAuth2ClientOptions {
    installed: Installed
}
|
||||
|
||||
// Only calendar event access is requested.
const SCOPES = ['https://www.googleapis.com/auth/calendar.events']
// Cached OAuth token location; delete this file to force re-authorization.
const TOKEN_PATH = `${cacheFolder}/google-calendar-token.json`
|
||||
|
||||
export async function getClient(): Promise<OAuth2Client> {
|
||||
const credentials: OAuth2ClientOptions = JSON.parse(
|
||||
await readLine(
|
||||
'Paste Google Calendar credentials (1Password "Release automation Google Calendar API App credentials"): ',
|
||||
`${cacheFolder}/google-calendar-credentials.json`
|
||||
)
|
||||
)
|
||||
const oauth2Client = await authorize(credentials)
|
||||
return oauth2Client
|
||||
}
|
||||
// Builds an authorized OAuth2 client: first from the cached token, and when
// that fails, via an interactive browser-based OAuth flow using a throwaway
// local HTTP server as the redirect target.
async function authorize(credentials: OAuth2ClientOptions): Promise<OAuth2Client> {
    let oauth2Client: OAuth2Client
    try {
        const token = await getAccessCachedToken()
        oauth2Client = new OAuth2Client({
            clientId: credentials.installed.client_id,
            clientSecret: credentials.installed.client_secret,
            redirectUri: credentials.installed.redirect_uri,
        })
        oauth2Client.setCredentials(token)
        return oauth2Client
    } catch {
        // No usable cached token: listen on an ephemeral port (0) to receive
        // the OAuth redirect.
        const server = await new Promise<Server>(resolve => {
            const serv = createServer()
            serv.listen(0, () => resolve(serv))
        })
        const { port } = server.address() as AddressInfo
        const oauth2Client = new OAuth2Client({
            clientId: credentials.installed.client_id,
            clientSecret: credentials.installed.client_secret,
            redirectUri: `http://localhost:${port}`,
        })

        const token = await getAccessTokenNoCache(server, oauth2Client)
        // Cache the freshly obtained token for subsequent runs.
        await writeFile(TOKEN_PATH, JSON.stringify(token))
        oauth2Client.setCredentials(token)
        server.close()
        return oauth2Client
    }
}
|
||||
|
||||
async function getAccessCachedToken(): Promise<Credentials> {
|
||||
const content = await readFile(TOKEN_PATH, { encoding: 'utf8' })
|
||||
return JSON.parse(content)
|
||||
}
|
||||
|
||||
// Runs the interactive OAuth flow: opens the consent URL in the user's
// browser, waits for Google's redirect to hit `server`, then exchanges the
// received auth code for tokens.
async function getAccessTokenNoCache(server: Server, oauth2Client: OAuth2Client): Promise<Credentials> {
    const authUrl = oauth2Client.generateAuthUrl({
        access_type: 'offline',
        scope: SCOPES,
    })

    const authCode = await new Promise<string>((resolve, reject) => {
        server.on('request', (request: IncomingMessage, response: ServerResponse) => {
            try {
                // The redirect carries either ?code=... on success or ?error=... on failure.
                const urlParts = new URL(request.url ?? '', 'http://localhost').searchParams
                const code = urlParts.get('code')
                const error = urlParts.get('error')
                if (error) {
                    throw new Error(error)
                }
                if (code) {
                    resolve(code)
                }
                response.end('Authentication successful! Please return to the console')
            } catch (error) {
                reject(error)
            }
        })
        // Launch the browser; a failure to open rejects the whole flow.
        open(authUrl, { wait: false })
            .then(childProcess => childProcess.unref())
            .catch(reject)
    })

    const { tokens } = await oauth2Client.getToken(authCode)
    return tokens
}
|
||||
|
||||
/**
 * Options for ensureEvent. Use either all-day dates (startDate/endDate) or
 * timed dates (startDateTime/endDateTime).
 */
export interface EventOptions {
    anyoneCanAddSelf?: boolean
    attendees?: string[]
    startDate?: string
    endDate?: string
    startDateTime?: string
    endDateTime?: string
    description?: string
    title: string
    // Passed through to the Calendar API event's "transparency" field.
    transparency: string
}
|
||||
|
||||
export async function ensureEvent(
|
||||
{
|
||||
anyoneCanAddSelf = false,
|
||||
attendees = [],
|
||||
startDate,
|
||||
endDate,
|
||||
startDateTime,
|
||||
endDateTime,
|
||||
description = '',
|
||||
title,
|
||||
transparency,
|
||||
}: EventOptions,
|
||||
auth: OAuth2Client
|
||||
): Promise<void> {
|
||||
const existingEvents = await listEvents(auth)
|
||||
const foundEvents = (existingEvents || []).filter(({ summary }) => summary === title)
|
||||
if (foundEvents.length > 0) {
|
||||
console.log(`Event ${JSON.stringify(title)} already exists (not updating)`)
|
||||
return
|
||||
}
|
||||
|
||||
const calendar = google.calendar({ version: 'v3', auth })
|
||||
await calendar.events.insert({
|
||||
calendarId: 'primary',
|
||||
requestBody: {
|
||||
anyoneCanAddSelf,
|
||||
attendees: attendees.map(email => ({ email, optional: true })),
|
||||
start: { date: startDate, dateTime: startDateTime },
|
||||
end: { date: endDate, dateTime: endDateTime },
|
||||
description,
|
||||
summary: title,
|
||||
transparency,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export async function listEvents(auth: OAuth2Client): Promise<calendar_v3.Schema$Event[] | undefined> {
|
||||
const calendar = google.calendar({ version: 'v3', auth })
|
||||
const result = await calendar.events.list({
|
||||
calendarId: 'primary',
|
||||
timeMin: new Date().toISOString(),
|
||||
timeMax: DateTime.now().plus({ year: 1 }).toJSDate().toISOString(), // this ends up returning a lot of events, so filtering down to the next year should be fine
|
||||
maxResults: 2500,
|
||||
singleEvents: true,
|
||||
orderBy: 'startTime',
|
||||
})
|
||||
return result.data.items
|
||||
}
|
||||
|
||||
export function calendarTime(date: string): { startDateTime: string; endDateTime: string } {
|
||||
return {
|
||||
startDateTime: new Date(date).toISOString(),
|
||||
endDateTime: addMinutes(new Date(date), 1).toISOString(),
|
||||
}
|
||||
}
|
||||
@ -1,23 +0,0 @@
|
||||
import { loadReleaseConfig } from './config'
|
||||
import { runStep, type StepID } from './release'
|
||||
import { ensureMainBranchUpToDate } from './util'
|
||||
|
||||
/**
 * Release captain automation
 *
 * Entry point: the first CLI argument selects the release step to run and the
 * remaining arguments are forwarded to that step.
 */
async function main(): Promise<void> {
    const config = loadReleaseConfig()
    const args = process.argv.slice(2)
    if (args.length === 0) {
        // Print usage before bailing out.
        await runStep(config, 'help')
        console.error('The release tool expects at least 1 argument')
        return
    }

    const step = args[0] as StepID
    const stepArguments = args.slice(1)
    // Guard against running release steps from a stale local checkout.
    ensureMainBranchUpToDate()
    await runStep(config, step, ...stepArguments)
}

main().catch(error => console.error(error))
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,17 +0,0 @@
|
||||
import got from 'got'
|
||||
|
||||
import { readLine, cacheFolder } from './util'
|
||||
|
||||
export async function postMessage(message: string, channel: string): Promise<void> {
|
||||
const webhookURL = await readLine(
|
||||
`Enter the Slack webhook URL corresponding to the #${channel} channel (https://start.1password.com/open/i?a=HEDEDSLHPBFGRBTKAKJWE23XX4&v=dnrhbauihkhjs5ag6vszsme45a&i=pldpna5vivapxe4phewnqd42ji&h=team-sourcegraph.1password.com): `,
|
||||
`${cacheFolder}/slackWebhookURL-${channel}.txt`
|
||||
)
|
||||
await got.post(webhookURL, {
|
||||
body: JSON.stringify({ text: message, link_names: true }),
|
||||
})
|
||||
}
|
||||
|
||||
export function slackURL(text: string, url: string): string {
|
||||
return `<${url}|${text}>`
|
||||
}
|
||||
@ -1,152 +0,0 @@
|
||||
import type { SemVer } from 'semver'
|
||||
|
||||
import { type ReleaseConfig, setAWSExecutorVersion, setGoogleExecutorVersion, setSrcCliVersion } from './config'
|
||||
import { cloneRepo, createChangesets, type Edit, getAuthenticatedGitHubClient, releaseBlockerLabel } from './github'
|
||||
import {
|
||||
nextAWSExecutorVersionInputWithAutodetect,
|
||||
nextGoogleExecutorVersionInputWithAutodetect,
|
||||
nextSrcCliVersionInputWithAutodetect,
|
||||
pullRequestBody,
|
||||
} from './util'
|
||||
|
||||
/**
 * Computes the edits required to bump the minimum src-cli version embedded in
 * the main repository, resolving the next src-cli version from a fresh clone
 * of sourcegraph/src-cli.
 */
export async function bakeSrcCliSteps(config: ReleaseConfig): Promise<Edit[]> {
    const client = await getAuthenticatedGitHubClient()

    // NOTE(keegan): 2024-02-13 I am running the 5.3 release but this is all
    // borked. We used to run a src-cli reference doc generator, but we now
    // have a docs repo which uses mdx files. So the reference generator needs
    // to be updated. So for now I am just skipping this and will follow up
    // later. Leaving the original comment below since we still need the next
    // var to be calculated. Additionally just commenting out the broken code.
    //
    // ok, this seems weird that we're cloning src-cli here, so read on -
    // We have docs that live in the main src/src repo about src-cli. Each version we update these docs for any changes
    // from the most recent version of src-cli. Cool, makes sense.
    // The thing is that these docs are generated from src-cli itself (a literal command, src docs).
    // So our options are either to release a new version of src-cli, wait for the github action to be complete and THEN update the src/src repo,
    // OR we can assume that main is going to be the new version (which it is). So we will clone it and execute the
    // commands against the binary directly, saving ourselves a lot of time.
    const { workdir } = await cloneRepo(client, 'sourcegraph', 'src-cli', {
        revision: 'main',
        revisionMustExist: true,
    })

    const next = await nextSrcCliVersionInputWithAutodetect(config, workdir)
    // Record the resolved version on the release config for later steps.
    setSrcCliVersion(config, next.version)

    return [
        combyReplace('const MinimumVersion = ":[1]"', next.version, 'internal/src-cli/consts.go'),
        // Broken since docs migration
        //`cd ${workdir}/cmd/src && go build`,
        //`cd doc/cli/references && go run ./doc.go --binaryPath="${workdir}/cmd/src/src"`,
    ]
}
|
||||
/**
 * Prepares a draft release-blocking PR in sourcegraph/terraform-aws-executors
 * that runs prepare-release.sh for the next executor version (auto-detected
 * from a fresh clone).
 */
export async function bakeAWSExecutorsSteps(config: ReleaseConfig): Promise<void> {
    const client = await getAuthenticatedGitHubClient()
    const { workdir } = await cloneRepo(client, 'sourcegraph', 'terraform-aws-executors', {
        revision: 'master',
        revisionMustExist: true,
    })

    const next = await nextAWSExecutorVersionInputWithAutodetect(config, workdir)
    setAWSExecutorVersion(config, next.version)
    // NOTE(review): looks like leftover debug output — the Google variant logs
    // a descriptive message here instead; confirm before removing.
    console.log(next)

    const prDetails = {
        body: pullRequestBody(`Update files for ${next.version} release`),
        title: `executor: v${next.version}`,
        commitMessage: `executor: v${next.version}`,
    }
    /*
        TODO prepare-release.sh commits and pushes the change, but
        createChangesets expects to do this. This needs to be fixed before the
        next minor release. I propose making prepare-release not commit and
        push. Or even better just get rid of it since its an overengineered
        wrapper around a single sed call. Then you can also remove the unshallow
        call.
    */
    const sets = await createChangesets({
        requiredCommands: [],
        changes: [
            {
                ...prDetails,
                owner: 'sourcegraph',
                repo: 'terraform-aws-executors',
                base: 'master',
                head: `release/prepare-${next.version}`,
                // prepare-release.sh needs full history to read tags
                edits: ['git fetch --unshallow', `./prepare-release.sh ${next.version}`],
                labels: [releaseBlockerLabel],
                draft: true,
            },
        ],
        dryRun: config.dryRun.changesets,
    })
    console.log('Merge the following pull requests:\n')
    for (const set of sets) {
        console.log(set.pullRequestURL)
    }
}
|
||||
|
||||
/**
 * Prepares a draft release-blocking PR in
 * sourcegraph/terraform-google-executors that runs prepare-release.sh for the
 * next executor version (auto-detected from a fresh clone).
 */
export async function bakeGoogleExecutorsSteps(config: ReleaseConfig): Promise<void> {
    const client = await getAuthenticatedGitHubClient()
    const { workdir } = await cloneRepo(client, 'sourcegraph', 'terraform-google-executors', {
        revision: 'master',
        revisionMustExist: true,
    })
    console.log(`Cloned sourcegraph/terraform-google-executors to ${workdir}`)

    const next = await nextGoogleExecutorVersionInputWithAutodetect(config, workdir)
    setGoogleExecutorVersion(config, next.version)

    const prDetails = {
        body: pullRequestBody(`Update files for ${next.version} release`),
        title: `executor: v${next.version}`,
        commitMessage: `executor: v${next.version}`,
    }
    /*
        TODO prepare-release.sh commits and pushes the change, but
        createChangesets expects to do this. This needs to be fixed before the
        next minor release. I propose making prepare-release not commit and
        push. Or even better just get rid of it since its an overengineered
        wrapper around a single sed call. Then you can also remove the unshallow
        call.
    */
    const sets = await createChangesets({
        requiredCommands: [],
        changes: [
            {
                ...prDetails,
                repo: 'terraform-google-executors',
                owner: 'sourcegraph',
                base: 'master',
                head: `release/prepare-${next.version}`,
                // prepare-release.sh needs full history to read tags
                edits: ['git fetch --unshallow', `./prepare-release.sh ${next.version}`],
                labels: [releaseBlockerLabel],
                draft: true,
            },
        ],
        dryRun: config.dryRun.changesets,
    })
    console.log('Merge the following pull requests:\n')
    for (const set of sets) {
        console.log(set.pullRequestURL)
    }
}
|
||||
|
||||
export function batchChangesInAppChangelog(version: SemVer, resetShow: boolean): Edit[] {
|
||||
const path = 'client/web/src/enterprise/batches/list/BatchChangesChangelogAlert.tsx'
|
||||
const steps = [combyReplace("const CURRENT_VERSION = ':[1]'", `${version.major}.${version.minor}`, path)]
|
||||
if (resetShow) {
|
||||
steps.push(combyReplace('const SHOW_CHANGELOG = :[1]', 'false', path))
|
||||
}
|
||||
return steps
|
||||
}
|
||||
|
||||
// given a comby pattern such as 'const MinimumVersion = ":[1]"' generate the comby expression to replace with provided substitution
|
||||
export function combyReplace(pattern: string, replace: string, path: string): Edit {
|
||||
pattern = pattern.replaceAll('"', '\\"')
|
||||
const sub = pattern.replace(':[1]', replace)
|
||||
return `comby -in-place "${pattern}" "${sub}" ${path}`
|
||||
}
|
||||
@ -1,7 +0,0 @@
|
||||
// Sentinel comment embedded in the upgrade-guide docs; the release tooling
// locates this divider to know where to insert new release entries. Existing
// docs must match it byte-for-byte, so do not change the text.
export const divider = '<!-- GENERATE UPGRADE GUIDE ON RELEASE (release tooling uses this to add entries) -->'

// Template block inserted at the top of each upgrade guide for a new minor
// release. NOTE(review): "Add changes changes" looks like a doubled-word typo,
// but this exact string is used as a replacement anchor by the upgrade-guide
// updaters — fixing it without migrating existing docs would break matching.
export const releaseTemplate = `${divider}

## Unreleased

<!-- Add changes changes to this section before release. -->`
|
||||
@ -1,528 +0,0 @@
|
||||
import { readdirSync, readFileSync, writeFileSync } from 'fs'
|
||||
import * as path from 'path'
|
||||
import * as readline from 'readline'
|
||||
|
||||
import type Octokit from '@octokit/rest'
|
||||
import chalk from 'chalk'
|
||||
import execa from 'execa'
|
||||
import { mkdir, readFile, writeFile } from 'mz/fs'
|
||||
import fetch from 'node-fetch'
|
||||
import * as semver from 'semver'
|
||||
import { SemVer } from 'semver'
|
||||
|
||||
import type { ReleaseConfig } from './config'
|
||||
import { getPreviousVersionExecutor, getPreviousVersionSrcCli } from './git'
|
||||
import { cloneRepo, type EditFunc, getAuthenticatedGitHubClient, listIssues } from './github'
|
||||
import * as update from './update'
|
||||
|
||||
// Sourcegraph instance the release tooling expects src-cli to target.
const SOURCEGRAPH_RELEASE_INSTANCE_URL = 'https://sourcegraph.sourcegraph.com'

/** A tag to be created as part of a release, plus where to create it. */
export interface ReleaseTag {
    // Repository the tag belongs to
    repo: string
    // Tag to create, e.g. "v5.2.0"
    nextTag: string
    // Local working directory containing the checkout of `repo`
    workDir: string
}
|
||||
|
||||
/* eslint-disable @typescript-eslint/consistent-type-assertions */
|
||||
export function formatDate(date: Date): string {
|
||||
return `${date.toLocaleString('en-US', {
|
||||
timeZone: 'UTC',
|
||||
dateStyle: 'medium',
|
||||
timeStyle: 'short',
|
||||
} as Intl.DateTimeFormatOptions)} (UTC)`
|
||||
}
|
||||
/* eslint-enable @typescript-eslint/consistent-type-assertions */
|
||||
|
||||
const addZero = (index: number): string => (index < 10 ? `0${index}` : `${index}`)
|
||||
|
||||
/**
|
||||
* Generates a link for comparing given Date with local time.
|
||||
*/
|
||||
export function timezoneLink(date: Date, linkName: string): string {
|
||||
const timeString = `${addZero(date.getUTCHours())}${addZero(date.getUTCMinutes())}`
|
||||
return `https://time.is/${timeString}_${date.getUTCDate()}_${date.toLocaleString('en-US', {
|
||||
month: 'short',
|
||||
})}_${date.getUTCFullYear()}_in_UTC?${encodeURI(linkName)}`
|
||||
}
|
||||
|
||||
export const cacheFolder = './.secrets'
|
||||
|
||||
export async function readLine(prompt: string, cacheFile?: string): Promise<string> {
|
||||
if (!cacheFile) {
|
||||
return readLineNoCache(prompt)
|
||||
}
|
||||
|
||||
try {
|
||||
return (await readFile(cacheFile, { encoding: 'utf8' })).trimEnd()
|
||||
} catch {
|
||||
const userInput = await readLineNoCache(prompt)
|
||||
await mkdir(path.dirname(cacheFile), { recursive: true })
|
||||
await writeFile(cacheFile, userInput)
|
||||
return userInput
|
||||
}
|
||||
}
|
||||
|
||||
async function readLineNoCache(prompt: string): Promise<string> {
|
||||
const readlineInterface = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
})
|
||||
const userInput = await new Promise<string>(resolve => readlineInterface.question(prompt, resolve))
|
||||
readlineInterface.close()
|
||||
return userInput
|
||||
}
|
||||
|
||||
export async function verifyWithInput(prompt: string): Promise<void> {
|
||||
await readLineNoCache(chalk.yellow(`${prompt}\nInput yes to confirm: `)).then(val => {
|
||||
if (!(val === 'yes' || val === 'y')) {
|
||||
console.log(chalk.red('Aborting!'))
|
||||
process.exit(0)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// similar to verifyWithInput but will not exit and will allow the caller to decide what to do
|
||||
export async function softVerifyWithInput(prompt: string): Promise<boolean> {
|
||||
return readLineNoCache(chalk.yellow(`${prompt}\nInput yes to confirm: `)).then(val => val === 'yes' || val === 'y')
|
||||
}
|
||||
|
||||
/**
 * Verifies a working Docker CLI/daemon by running `docker version`; the
 * returned promise rejects when the command fails. Output is discarded.
 */
export async function ensureDocker(): Promise<execa.ExecaReturnValue<string>> {
    return execa('docker', ['version'], { stdout: 'ignore' })
}
|
||||
|
||||
export function changelogURL(version: string): string {
|
||||
const versionAnchor = version.replaceAll('.', '-')
|
||||
return `https://sourcegraph.com/github.com/sourcegraph/sourcegraph/-/blob/CHANGELOG.md#${versionAnchor}`
|
||||
}
|
||||
|
||||
/**
 * Compares `baseBranch` against `targetBranch` via
 * `git rev-list --left-right --count` and reports divergence.
 * Returns true only when the two branches point at identical history
 * (0 ahead, 0 behind); otherwise prints a summary and returns false.
 */
function ensureBranchUpToDate(baseBranch: string, targetBranch: string): boolean {
    // With "target...base": left count = commits only in targetBranch (we are
    // behind by these), right count = commits only in baseBranch (ahead).
    const [behind, ahead] = execa
        .sync('git', ['rev-list', '--left-right', '--count', targetBranch + '...' + baseBranch])
        .stdout.split('\t')

    if (behind === '0' && ahead === '0') {
        return true
    }

    // Pluralizes "commit(s) ahead/behind" for the messages below.
    const countCommits = function (numberOfCommits: string, aheadOrBehind: string): string {
        return numberOfCommits === '1'
            ? numberOfCommits + ' commit ' + aheadOrBehind
            : numberOfCommits + ' commits ' + aheadOrBehind
    }

    if (behind !== '0' && ahead !== '0') {
        console.log(
            `Your branch is ${countCommits(ahead, 'ahead')} and ${countCommits(
                behind,
                'behind'
            )} the branch ${targetBranch}.`
        )
    } else if (behind !== '0') {
        console.log(`Your branch is ${countCommits(behind, 'behind')} the branch ${targetBranch}.`)
    } else if (ahead !== '0') {
        console.log(`Your branch is ${countCommits(ahead, 'ahead')} the branch ${targetBranch}.`)
    }

    return false
}
|
||||
|
||||
/**
 * Exits the process (code 1) unless the current checkout is on `main` and in
 * sync with origin/main. Runs `git remote update` first so the comparison
 * sees the latest remote state.
 */
export function ensureMainBranchUpToDate(): void {
    const mainBranch = 'main'
    const remoteMainBranch = 'origin/main'
    const currentBranch = execa.sync('git', ['rev-parse', '--abbrev-ref', 'HEAD']).stdout.trim()
    if (currentBranch !== mainBranch) {
        console.log(
            `Expected to be on branch ${mainBranch}, but was on ${currentBranch}. Run \`git checkout ${mainBranch}\` to switch to the main branch.`
        )
        process.exit(1)
    }
    // Refresh remote-tracking refs before comparing local vs. remote.
    execa.sync('git', ['remote', 'update'], { stdout: 'ignore' })
    if (!ensureBranchUpToDate(mainBranch, remoteMainBranch)) {
        process.exit(1)
    }
}
|
||||
|
||||
export function ensureReleaseBranchUpToDate(branch: string): void {
|
||||
const remoteBranch = 'origin/' + branch
|
||||
if (!ensureBranchUpToDate(branch, remoteBranch)) {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
export async function getLatestSrcCliGithubRelease(): Promise<string> {
|
||||
return fetch('https://api.github.com/repos/sourcegraph/src-cli/releases/latest', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
},
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(json => json.tag_name)
|
||||
}
|
||||
|
||||
export async function ensureSrcCliUpToDate(): Promise<void> {
|
||||
const latestTag = await getLatestSrcCliGithubRelease()
|
||||
let installedTag = execa.sync('src', ['version']).stdout.split('\n')
|
||||
installedTag = installedTag[0].split(':')
|
||||
const trimmedInstalledTag = installedTag[1].trim()
|
||||
|
||||
if (trimmedInstalledTag !== latestTag) {
|
||||
try {
|
||||
console.log('Uprading src-cli to the latest version.')
|
||||
execa.sync('brew', ['upgrade', 'src-cli'])
|
||||
} catch (error) {
|
||||
console.log('Trouble upgrading src-cli:', error)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function ensureSrcCliEndpoint(): void {
|
||||
const srcEndpoint = process.env.SRC_ENDPOINT
|
||||
if (srcEndpoint !== SOURCEGRAPH_RELEASE_INSTANCE_URL) {
|
||||
throw new Error(`the $SRC_ENDPOINT provided doesn't match what is expected by the release tool.
|
||||
Expected $SRC_ENDPOINT to be "${SOURCEGRAPH_RELEASE_INSTANCE_URL}"`)
|
||||
}
|
||||
}
|
||||
|
||||
export async function getLatestTag(owner: string, repo: string): Promise<string> {
|
||||
return fetch(`https://api.github.com/repos/${owner}/${repo}/tags`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
},
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(json => json[0].name)
|
||||
}
|
||||
|
||||
/** Login credential for a container registry, as collected from the operator. */
interface ContainerRegistryCredential {
    // Registry account name
    username: string
    // Access token / password for the account
    password: string
    // Registry host, e.g. "index.docker.io"
    hostname: string
}
|
||||
|
||||
export async function getContainerRegistryCredential(registryHostname: string): Promise<ContainerRegistryCredential> {
|
||||
const registryUsername = await readLine(
|
||||
`Enter your container registry (${registryHostname} ) username: `,
|
||||
`${cacheFolder}/cr_${registryHostname.replace('.', '_')}_username.txt`
|
||||
)
|
||||
const registryPassword = await readLine(
|
||||
`Enter your container registry (${registryHostname} ) access token: `,
|
||||
`${cacheFolder}/cr_${registryHostname.replace('.', '_')}_password.txt`
|
||||
)
|
||||
const credential: ContainerRegistryCredential = {
|
||||
username: registryUsername,
|
||||
password: registryPassword,
|
||||
hostname: registryHostname,
|
||||
}
|
||||
return credential
|
||||
}
|
||||
|
||||
// Generates deployment-specific upgrade-guide content for a release.
export type ContentFunc = (previousVersion?: string, nextVersion?: string) => string

// One content generator per deployment type, keyed by the upgrade-guide file
// name without extension. Most deployment types need no generated content;
// the pure-docker guide links the reference upgrade diffs.
const upgradeContentGenerators: { [s: string]: ContentFunc } = {
    docker_compose: () => '',
    kubernetes: () => '',
    server: () => '',
    pure_docker: (previousVersion?: string, nextVersion?: string) => {
        const compare = `compare/v${previousVersion}...v${nextVersion}`
        return `As a template, perform the same actions as the following diff in your own deployment: [\`Upgrade to v${nextVersion}\`](https://github.com/sourcegraph/deploy-sourcegraph-docker/${compare})
\nFor non-standard replica builds:
- [\`Customer Replica 1: ➔ v${nextVersion}\`](https://github.com/sourcegraph/deploy-sourcegraph-docker-customer-replica-1/${compare})`
    },
}
// Looks up the generator for a deployment type; undefined when unknown.
export const getUpgradeGuide = (mode: string): ContentFunc => upgradeContentGenerators[mode]

// Renders every deployment type's guide for previous -> next, labelled by type.
export const getAllUpgradeGuides = (previous: string, next: string): string[] =>
    Object.keys(upgradeContentGenerators).map(
        key => `Guide for: ${key}\n\n${upgradeContentGenerators[key](previous, next)}`
    )
|
||||
|
||||
export const updateUpgradeGuides = (previous: string, next: string): EditFunc => {
|
||||
let updateDirectory = '/doc/admin/updates'
|
||||
const notPatchRelease = next.endsWith('.0')
|
||||
|
||||
return (directory: string): void => {
|
||||
updateDirectory = directory + updateDirectory
|
||||
for (const file of readdirSync(updateDirectory)) {
|
||||
if (file === 'index.md') {
|
||||
continue
|
||||
}
|
||||
const mode = file.replace('.md', '')
|
||||
const updateFunc = getUpgradeGuide(mode)
|
||||
if (updateFunc === undefined) {
|
||||
console.log(`Skipping upgrade file: ${file} due to missing content generator`)
|
||||
continue
|
||||
}
|
||||
const guide = getUpgradeGuide(mode)(previous, next)
|
||||
|
||||
const fullPath = path.join(updateDirectory, file)
|
||||
console.log(`Updating upgrade guide: ${fullPath}`)
|
||||
let updateContents = readFileSync(fullPath).toString()
|
||||
const releaseHeader = `## v${previous} ➔ v${next}`
|
||||
const notesHeader = '\n\n#### Notes:'
|
||||
|
||||
if (notPatchRelease) {
|
||||
let content = `${update.releaseTemplate}\n\n${releaseHeader}`
|
||||
if (guide) {
|
||||
content = `${content}\n\n${guide}`
|
||||
}
|
||||
content = content + notesHeader
|
||||
updateContents = updateContents.replace(update.releaseTemplate, content)
|
||||
} else {
|
||||
const prevReleaseHeaderPattern = `##\\s+v\\d\\.\\d(?:\\.\\d)? ➔ v${previous}\\s*`
|
||||
const matches = updateContents.match(new RegExp(prevReleaseHeaderPattern))
|
||||
if (!matches || matches.length === 0) {
|
||||
console.log(`Unable to find header using pattern: ${prevReleaseHeaderPattern}. Skipping.`)
|
||||
continue
|
||||
}
|
||||
const prevReleaseHeader = matches[0]
|
||||
let content = `${releaseHeader}`
|
||||
if (guide) {
|
||||
content = `${content}\n\n${guide}`
|
||||
}
|
||||
content = content + notesHeader + `\n\n${prevReleaseHeader}`
|
||||
updateContents = updateContents.replace(prevReleaseHeader, content)
|
||||
}
|
||||
writeFileSync(fullPath, updateContents)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This is a copy of updateUpgradeGuides designed to target mdx files instead of md files and also search files in the docs/ rather than doc/ directory
|
||||
export const updateDocsUpgradeGuides = (previous: string, next: string): EditFunc => {
|
||||
let updateDirectory = '/docs/admin/updates'
|
||||
const notPatchRelease = next.endsWith('.0')
|
||||
|
||||
return (directory: string): void => {
|
||||
updateDirectory = directory + updateDirectory
|
||||
for (const file of readdirSync(updateDirectory)) {
|
||||
if (file === 'index.mdx') {
|
||||
continue
|
||||
}
|
||||
const mode = file.replace('.mdx', '')
|
||||
const updateFunc = getUpgradeGuide(mode)
|
||||
if (updateFunc === undefined) {
|
||||
console.log(`Skipping upgrade file: ${file} due to missing content generator`)
|
||||
continue
|
||||
}
|
||||
const guide = getUpgradeGuide(mode)(previous, next)
|
||||
|
||||
const fullPath = path.join(updateDirectory, file)
|
||||
console.log(`Updating upgrade guide: ${fullPath}`)
|
||||
let updateContents = readFileSync(fullPath).toString()
|
||||
const releaseHeader = `## v${previous} ➔ v${next}`
|
||||
const notesHeader = '\n\n#### Notes:'
|
||||
|
||||
if (notPatchRelease) {
|
||||
let content = `${update.releaseTemplate}\n\n${releaseHeader}`
|
||||
if (guide) {
|
||||
content = `${content}\n\n${guide}`
|
||||
}
|
||||
content = content + notesHeader
|
||||
updateContents = updateContents.replace(update.releaseTemplate, content)
|
||||
} else {
|
||||
const prevReleaseHeaderPattern = `##\\s+v\\d\\.\\d(?:\\.\\d)? ➔ v${previous}\\s*`
|
||||
const matches = updateContents.match(new RegExp(prevReleaseHeaderPattern))
|
||||
if (!matches || matches.length === 0) {
|
||||
console.log(`Unable to find header using pattern: ${prevReleaseHeaderPattern}. Skipping.`)
|
||||
continue
|
||||
}
|
||||
const prevReleaseHeader = matches[0]
|
||||
let content = `${releaseHeader}`
|
||||
if (guide) {
|
||||
content = `${content}\n\n${guide}`
|
||||
}
|
||||
content = content + notesHeader + `\n\n${prevReleaseHeader}`
|
||||
updateContents = updateContents.replace(prevReleaseHeader, content)
|
||||
}
|
||||
writeFileSync(fullPath, updateContents)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function retryInput(
|
||||
prompt: string,
|
||||
delegate: (val: string) => boolean,
|
||||
errorMessage?: string
|
||||
): Promise<string> {
|
||||
while (true) {
|
||||
const val = await readLine(prompt).then(value => value)
|
||||
if (delegate(val)) {
|
||||
return val
|
||||
}
|
||||
if (errorMessage) {
|
||||
console.log(chalk.red(errorMessage))
|
||||
} else {
|
||||
console.log(chalk.red('invalid input'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GitHub search query matching all open release-blocker issues in the org.
const blockingQuery = 'is:open org:sourcegraph label:release-blocker'

/** Lists all open issues labelled release-blocker across the sourcegraph org. */
export async function getReleaseBlockers(
    octokit: Octokit
): Promise<Octokit.SearchIssuesAndPullRequestsResponseItemsItem[]> {
    return listIssues(octokit, blockingQuery)
}
|
||||
|
||||
export function backportIssueQuery(version: SemVer): string {
|
||||
return `is:open is:pr repo:sourcegraph org:sourcegraph label:"backported-to-${version.major}.${version.minor}"`
|
||||
}
|
||||
|
||||
/** Lists open backport PRs labelled for the given major.minor release. */
export async function getBackportsForVersion(
    octokit: Octokit,
    version: SemVer
): Promise<Octokit.SearchIssuesAndPullRequestsResponseItemsItem[]> {
    return listIssues(octokit, backportIssueQuery(version))
}
|
||||
|
||||
export function releaseBlockerUri(): string {
|
||||
return issuesQueryUri(blockingQuery)
|
||||
}
|
||||
|
||||
function issuesQueryUri(query: string): string {
|
||||
return `https://github.com/issues?q=${encodeURIComponent(query)}`
|
||||
}
|
||||
|
||||
export async function validateNoOpenBackports(octokit: Octokit, version: SemVer): Promise<void> {
|
||||
const backports = await getBackportsForVersion(octokit, version)
|
||||
if (backports.length > 0) {
|
||||
await verifyWithInput(`${backportWarning(backports.length, version)})\nConfirm to proceed`)
|
||||
} else {
|
||||
console.log('No backports found!')
|
||||
}
|
||||
}
|
||||
|
||||
/** Returns a human-readable warning describing how many backport PRs are open for `version`. */
export async function backportStatus(octokit: Octokit, version: SemVer): Promise<string> {
    const backports = await getBackportsForVersion(octokit, version)
    return backportWarning(backports.length, version)
}
|
||||
|
||||
export function backportWarning(numBackports: number, version: SemVer): string {
|
||||
return `Warning! There are ${chalk.red(numBackports)} backport pull requests open!\n${issuesQueryUri(
|
||||
backportIssueQuery(version)
|
||||
)}`
|
||||
}
|
||||
|
||||
export async function validateNoReleaseBlockers(octokit: Octokit): Promise<void> {
|
||||
const blockers = await getReleaseBlockers(octokit)
|
||||
if (blockers.length > 0) {
|
||||
await verifyWithInput(
|
||||
`Warning! There are ${chalk.red(
|
||||
blockers.length
|
||||
)} release blocking issues open!\n${releaseBlockerUri()}\nConfirm to proceed`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Determines the next src-cli version to release.
 *
 * If the in-progress release config already records a src-cli version, that
 * value is used; otherwise the previous version is autodetected from a clone
 * of sourcegraph/src-cli (cloned on demand when `repoPath` is not supplied)
 * and bumped by a minor. The operator is asked to confirm and may type a
 * different semver instead.
 */
export async function nextSrcCliVersionInputWithAutodetect(config: ReleaseConfig, repoPath?: string): Promise<SemVer> {
    let next: SemVer
    if (!config.in_progress?.srcCliVersion) {
        if (!repoPath) {
            const client = await getAuthenticatedGitHubClient()
            const { workdir } = await cloneRepo(client, 'sourcegraph', 'src-cli', {
                revision: 'main',
                revisionMustExist: true,
            })
            repoPath = workdir
        }
        console.log('Attempting to detect previous src-cli version...')
        const previous = getPreviousVersionSrcCli(repoPath)
        console.log(chalk.blue(`Detected previous src-cli version: ${previous.version}`))
        next = previous.inc('minor')
    } else {
        next = new SemVer(config.in_progress.srcCliVersion)
    }

    // Operator rejected the suggestion: prompt until a valid semver is entered.
    if (!(await softVerifyWithInput(`Confirm next version of src-cli should be: ${next.version}`))) {
        return new SemVer(
            await retryInput(
                'Enter the next version of src-cli: ',
                val => !!semver.parse(val),
                'Expected semver format'
            )
        )
    }
    return next
}
|
||||
|
||||
/**
 * Determines the next sourcegraph/terraform-google-executors version to
 * release: uses the in-progress config value when present, otherwise
 * autodetects the previous version from a clone (cloned on demand when
 * `repoPath` is not supplied) and bumps the minor. The operator confirms or
 * types a different semver.
 */
export async function nextGoogleExecutorVersionInputWithAutodetect(
    config: ReleaseConfig,
    repoPath?: string
): Promise<SemVer> {
    let next: SemVer
    if (!config.in_progress?.googleExecutorVersion) {
        if (!repoPath) {
            const client = await getAuthenticatedGitHubClient()
            // NOTE(review): clones 'main' here, while bakeGoogleExecutorsSteps
            // uses 'master' for the same repo — confirm the default branch.
            const { workdir } = await cloneRepo(client, 'sourcegraph', 'terraform-google-executors', {
                revision: 'main',
                revisionMustExist: true,
            })
            repoPath = workdir
        }
        console.log('Attempting to detect previous executor version...')
        const previous = getPreviousVersionExecutor(repoPath)
        console.log(chalk.blue(`Detected previous executor version: ${previous.version}`))
        next = previous.inc('minor')
    } else {
        next = new SemVer(config.in_progress.googleExecutorVersion)
    }

    // Operator rejected the suggestion: prompt until a valid semver is entered.
    if (
        !(await softVerifyWithInput(
            `Confirm next version of sourcegraph/terraform-google-executors should be: ${next.version}`
        ))
    ) {
        return new SemVer(
            await retryInput(
                'Enter the next version of executor: ',
                val => !!semver.parse(val),
                'Expected semver format'
            )
        )
    }
    return next
}
|
||||
|
||||
/**
 * Determines the next sourcegraph/terraform-aws-executors version to release:
 * uses the in-progress config value when present, otherwise autodetects the
 * previous version from a clone (cloned on demand when `repoPath` is not
 * supplied) and bumps the minor. The operator confirms or types a different
 * semver.
 */
export async function nextAWSExecutorVersionInputWithAutodetect(
    config: ReleaseConfig,
    repoPath?: string
): Promise<SemVer> {
    let next: SemVer
    if (!config.in_progress?.awsExecutorVersion) {
        if (!repoPath) {
            const client = await getAuthenticatedGitHubClient()
            const { workdir } = await cloneRepo(client, 'sourcegraph', 'terraform-aws-executors', {
                revision: 'main',
                revisionMustExist: true,
            })
            repoPath = workdir
        }
        console.log('Attempting to detect previous executor version...')
        const previous = getPreviousVersionExecutor(repoPath)
        console.log(chalk.blue(`Detected previous sourcegraph/terraform-aws-executors version: ${previous.version}`))
        next = previous.inc('minor')
    } else {
        next = new SemVer(config.in_progress.awsExecutorVersion)
    }

    // Operator rejected the suggestion: prompt until a valid semver is entered.
    if (!(await softVerifyWithInput(`Confirm next version of executor should be: ${next.version}`))) {
        return new SemVer(
            await retryInput(
                'Enter the next version of executor: ',
                val => !!semver.parse(val),
                'Expected semver format'
            )
        )
    }
    return next
}
|
||||
|
||||
export function pullRequestBody(content: string): string {
|
||||
const header = 'This pull request was automatically generated by the release-tool.\n'
|
||||
const testPlan = '\n## Test Plan:\nN/A'
|
||||
return `${header}${content}${testPlan}`
|
||||
}
|
||||
@ -1,115 +0,0 @@
|
||||
<!--
|
||||
DO NOT COPY THIS ISSUE TEMPLATE MANUALLY. Use `pnpm run release tracking:issues` in the `sourcegraph/sourcegraph` repository.
|
||||
|
||||
Arguments:
|
||||
- $MAJOR
|
||||
- $MINOR
|
||||
- $PATCH
|
||||
- $RELEASE_DATE
|
||||
- $ONE_WORKING_DAY_AFTER_RELEASE
|
||||
-->
|
||||
|
||||
# $MAJOR.$MINOR.$PATCH patch release
|
||||
|
||||
This release is scheduled for **$RELEASE_DATE**.
|
||||
|
||||
> [!WARNING]
|
||||
> To get your commits in `main` included in this patch release, add the `backport-$MAJOR.$MINOR` to the PR to `main`.
|
||||
|
||||
## Setup
|
||||
|
||||
<!-- Keep in sync with release_issue_template's "Setup" section -->
|
||||
|
||||
- [ ] Ensure you have the latest version of the release tooling and configuration by checking out and updating `sourcegraph@main`.
|
||||
- [ ] Ensure release configuration in [`dev/release/release-config.jsonc`](https://sourcegraph.com/github.com/sourcegraph/sourcegraph/-/blob/dev/release/release-config.jsonc) on `main` has version $MAJOR.$MINOR.$PATCH selected by using the command:
|
||||
|
||||
```shell
|
||||
pnpm run release release:activate-release
|
||||
```
|
||||
|
||||
- [ ] Create the release tracking issue
|
||||
|
||||
```shell
|
||||
pnpm release tracking:issues
|
||||
```
|
||||
|
||||
## Prepare release
|
||||
|
||||
- [ ] Ensure that all [backported PRs](https://github.com/sourcegraph/sourcegraph/pulls?q=is%3Apr+is%3Aopen+base%3A$MAJOR.$MINOR) have been merged.
|
||||
|
||||
Create and test the first release candidate:
|
||||
|
||||
> [!NOTE]
|
||||
> Ensure that you've pulled both main and release branches before running this command.
|
||||
|
||||
- [ ] Push a new release candidate tag. This command will automatically detect the appropriate release candidate number. This command can be executed as many times as required, and will increment the release candidate number for each subsequent build:
|
||||
|
||||
```sh
|
||||
pnpm run release release:create-candidate
|
||||
```
|
||||
|
||||
- [ ] Ensure that the following Buildkite pipelines all pass for the `v$MAJOR.$MINOR.$PATCH-rc.1` tag:
|
||||
- [ ] [Sourcegraph pipeline](https://buildkite.com/sourcegraph/sourcegraph/builds?branch=v$MAJOR.$MINOR.$PATCH-rc.1)
|
||||
- [ ] File any failures and regressions in the pipelines as `release-blocker` issues and assign the appropriate teams.
|
||||
|
||||
> [!NOTE]
|
||||
> You will need to re-check the above pipelines for any subsequent release candidates. You can see the Buildkite logs by tweaking the "branch" query parameter in the URLs to point to the desired release candidate. In general, the URL scheme looks like the following (replacing `N` in the URL): `https://buildkite.com/sourcegraph/sourcegraph/builds?branch=v$MAJOR.$MINOR.$PATCH-rc.N`
|
||||
|
||||
Once there is a release candidate available:
|
||||
|
||||
- [ ] Create a [Security release approval](https://github.com/sourcegraph/sourcegraph/issues/new?assignees=andreeleuterio%2C+evict%2C+willdollman%2C+mohammadualam&labels=release-blocker&projects=&template=security-release-approval.md&title=$MAJOR.$MINOR.$PATCH+Security+approval) issue and post a message in the [#discuss-security](https://sourcegraph.slack.com/archives/C1JH2BEHZ) channel tagging `@security-support`.
|
||||
|
||||
## Stage release
|
||||
|
||||
<!-- Keep in sync with release_issue_template's "Stage release" section -->
|
||||
|
||||
- [ ] Verify the **$MAJOR.$MINOR.$PATCH** section of [CHANGELOG](https://github.com/sourcegraph/sourcegraph/blob/main/CHANGELOG.md) on the `main` is accurate.
|
||||
- [ ] Ensure security has approved the [Security release approval](https://github.com/sourcegraph/sourcegraph/issues?q=label%3Arelease-blocker+Security+approval+is%3Aopen) issue you created.
|
||||
- [ ] Promote a release candidate to the final release build. You will need to provide the tag of the release candidate which you would like to promote as an argument. To get a list of available release candidates, you can use:
|
||||
```shell
|
||||
pnpm run release release:check-candidate
|
||||
```
|
||||
To promote the candidate, use the command:
|
||||
```sh
|
||||
pnpm run release release:promote-candidate <tag>
|
||||
```
|
||||
- [ ] Ensure that the pipeline for the `v$MAJOR.$MINOR.$PATCH` tag has passed: [Sourcegraph pipeline](https://buildkite.com/sourcegraph/sourcegraph/builds?branch=v$MAJOR.$MINOR.$PATCH)
|
||||
- [ ] Wait for the `$MAJOR.$MINOR.$PATCH` release Docker images to be available in [Docker Hub](https://hub.docker.com/r/sourcegraph/server/tags)
|
||||
- [ ] Open PRs that publish the new release and address any action items required to finalize draft PRs (track PR status via the [generated release batch change](https://sourcegraph.sourcegraph.com/organizations/sourcegraph/batch-changes/release-sourcegraph-$MAJOR.$MINOR.$PATCH)):
|
||||
```sh
|
||||
pnpm run release release:stage
|
||||
```
|
||||
|
||||
## Finalize release
|
||||
|
||||
<!-- Keep in sync with release_issue_template's "Finalize release" section, except no blog post -->
|
||||
|
||||
- [ ] From the [release batch change](https://sourcegraph.sourcegraph.com/organizations/sourcegraph/batch-changes/release-sourcegraph-$MAJOR.$MINOR.$PATCH), merge the release-publishing PRs created previously. Note: some PRs require certain actions performed before merging.
|
||||
- [ ] **After all the PRs are merged**, perform following checks/actions
|
||||
- For [deploy-sourcegraph](https://github.com/sourcegraph/deploy-sourcegraph)
|
||||
- [ ] Ensure the [release tag](https://github.com/sourcegraph/deploy-sourcegraph/tags) has been created
|
||||
- For [deploy-sourcegraph-docker](https://github.com/sourcegraph/deploy-sourcegraph-docker)
|
||||
- [ ] Ensure the [release tag](https://github.com/sourcegraph/deploy-sourcegraph-docker/tags) has been created
|
||||
- For [deploy-sourcegraph-helm](https://github.com/sourcegraph/deploy-sourcegraph-helm), also:
|
||||
- [ ] Update the [changelog](https://github.com/sourcegraph/deploy-sourcegraph-helm/blob/main/charts/sourcegraph/CHANGELOG.md) to include changes from the patch
|
||||
- [ ] Cherry-pick the release-publishing PR from the release branch into `main`
|
||||
- [ ] Announce that the release is live:
|
||||
```sh
|
||||
pnpm run release release:announce
|
||||
```
|
||||
|
||||
## Post-release
|
||||
|
||||
- [ ] Close the release:
|
||||
|
||||
```shell
|
||||
pnpm run release release:close
|
||||
```
|
||||
|
||||
- [ ] Open a PR to update [`dev/release/release-config.jsonc`](https://github.com/sourcegraph/sourcegraph/edit/main/dev/release/release-config.jsonc) after the auto-generated changes above if any.
|
||||
- [ ] Update the [CHANGELOG](https://github.com/sourcegraph/sourcegraph/blob/main/CHANGELOG.md) by opening and merging a PR into `main` (**not** the release branch), making the following changes:
|
||||
- [ ] Move the released changes into the $MAJOR.$MINOR.$PATCH version section
|
||||
- [ ] Add a new section for the [upcoming patch release](https://handbook.sourcegraph.com/departments/engineering/dev/process/releases/#current-patch-schedule) if this is not the last planned patch release for version $MAJOR.$MINOR
|
||||
|
||||
> [!NOTE]
|
||||
> If another patch release is requested after the release, ask that a [patch request issue](https://github.com/sourcegraph/sourcegraph/issues/new?assignees=&labels=team%2Fdistribution&template=request_patch_release.md) be filled out and approved first.
|
||||
@ -1,179 +0,0 @@
|
||||
<!--
|
||||
DO NOT COPY THIS ISSUE TEMPLATE MANUALLY. Use `pnpm run release tracking:issues` in the `sourcegraph/sourcegraph` repository.
|
||||
|
||||
Arguments:
|
||||
- $MAJOR
|
||||
- $MINOR
|
||||
- $PATCH
|
||||
- $RELEASE_DATE
|
||||
- $SECURITY_REVIEW_DATE
|
||||
- $CODE_FREEZE_DATE
|
||||
-->
|
||||
|
||||
# $MAJOR.$MINOR release
|
||||
|
||||
This release is scheduled for **$RELEASE_DATE**.
|
||||
|
||||
---
|
||||
|
||||
## Setup
|
||||
|
||||
<!-- Keep in sync with patch_release_issue_template's "Setup" section -->
|
||||
|
||||
- [ ] Ensure you have the latest version of the release tooling and configuration by checking out and updating `sourcegraph@main`.
|
||||
- [ ] Ensure release configuration in [`dev/release/release-config.jsonc`](https://sourcegraph.com/github.com/sourcegraph/sourcegraph/-/blob/dev/release/release-config.jsonc) on `main` has version $MAJOR.$MINOR.$PATCH selected by using the command:
|
||||
|
||||
```shell
|
||||
pnpm run release release:activate-release
|
||||
```
|
||||
|
||||
## Security review ($SECURITY_REVIEW_DATE)
|
||||
|
||||
- [ ] Create a [Security release approval](https://github.com/sourcegraph/sourcegraph/issues/new/choose#:~:text=Security%20release%20approval) issue and post a message in the [#discuss-security](https://sourcegraph.slack.com/archives/C1JH2BEHZ) channel tagging `@security-support`.
|
||||
|
||||
## Cut release ($CODE_FREEZE_DATE)
|
||||
|
||||
Perform these steps three days before the release date to generate a stable release candidate.
|
||||
|
||||
### Prepare release
|
||||
|
||||
- [ ] Post a release status update to Slack - [review all release-blocking issues](https://github.com/sourcegraph/sourcegraph/issues?q=is%3Aopen+is%3Aissue+label%3Arelease-blocker), and ensure someone is resolving each.
|
||||
|
||||
```sh
|
||||
pnpm run release release:status
|
||||
```
|
||||
|
||||
Do the [branch cut](https://handbook.sourcegraph.com/departments/engineering/dev/process/releases/#release-branches) for the release:
|
||||
|
||||
- [ ] Update the changelog and create pull requests:
|
||||
|
||||
```sh
|
||||
pnpm run release changelog:cut
|
||||
```
|
||||
|
||||
- [ ] Manually review the pull requests created in the previous step and merge.
|
||||
- [ ] Wait for CI of the commit on `main` to pass.
|
||||
- [ ] Request Admin permissions of `sourcegraph/sourcegraph` repository through [Entitle](https://app.entitle.io/request?targetType=resource&duration=1800&justification=Temporarily%20disable%20the%20%22Require%20linear%20history%22%20rule%20for%20release%20branches%20to%20create%20a%20new%20release%20branch.&integrationId=032680b6-f13d-42aa-9837-38097b45f0fe&resourceId=cd16ad0f-0e7e-4f20-8a8c-b3c57751dafd&roleId=5151f2f3-40a3-4697-99a2-b5e756e43f5b&grantMethodId=5151f2f3-40a3-4697-99a2-b5e756e43f5b) in order to disable the [**Require linear history** protection rule for release branches](https://github.com/sourcegraph/sourcegraph/settings/branch_protection_rules/34536616#:~:text=Require%20linear%20history).
|
||||
|
||||
- [ ] Create the `$MAJOR.$MINOR` branch off the CHANGELOG commit in the previous step:
|
||||
|
||||
```sh
|
||||
pnpm run release release:branch-cut
|
||||
```
|
||||
|
||||
- [ ] Re-enable the [**Require linear history** protection rule for release branches](https://github.com/sourcegraph/sourcegraph/settings/branch_protection_rules/34536616#:~:text=Require%20linear%20history).
|
||||
|
||||
- [ ] Push a new release candidate tag. This command will automatically detect the appropriate release candidate number. This command can be executed as many times as required, and will increment the release candidate number for each subsequent build:
|
||||
|
||||
```sh
|
||||
pnpm run release release:create-candidate
|
||||
```
|
||||
|
||||
- [ ] Ensure that the following Buildkite pipelines all pass for the `v$MAJOR.$MINOR.$PATCH-rc.N` tag:
|
||||
- [ ] [Sourcegraph pipeline](https://buildkite.com/sourcegraph/sourcegraph/builds?branch=v$MAJOR.$MINOR.$PATCH-rc.1)
|
||||
- [ ] Cross check all reported CVEs are in the accepted list (`https://handbook.sourcegraph.com/departments/security/tooling/trivy/$MAJOR-$MINOR-$PATCH`). You can use the utility command `sg release cve-check` to help with this step. Otherwise, alert `@security-support` in the [#release-guild](https://sourcegraph.slack.com/archives/C032Z79NZQC) channel ASAP.
|
||||
- [ ] File any failures and regressions in the pipelines as `release-blocker` issues and assign the appropriate teams.
|
||||
|
||||
Revert or disable features that may cause delays. As necessary, `git cherry-pick` bugfix (not feature!) commits from `main` into the release branch. Continue to create new release candidates as necessary, until no more `release-blocker` issues remain.
|
||||
|
||||
- [ ] Update the [target branch of the RC test instance](https://github.com/sourcegraph/cloud/blob/main/.github/workflows/mi_upgrade_rctest.yml#L51) to the new release branch `$MAJOR.$MINOR`.
|
||||
- [ ] Trigger a [manual run of the GitHub Action](https://github.com/sourcegraph/cloud/actions/workflows/mi_upgrade_rctest.yml) to upgrade the RC test instance. It should complete without an error, otherwise there might be a database migration problem that warrants a `release-blocker` issue.
|
||||
|
||||
> [!important]
|
||||
> You will need to re-check the above pipelines for any subsequent release candidates. You can see the Buildkite logs by tweaking the "branch" query parameter in the URLs to point to the desired release candidate. In general, the URL scheme looks like the following (replacing `N` in the URL):
|
||||
|
||||
- Sourcegraph: `https://buildkite.com/sourcegraph/sourcegraph/builds?branch=v$MAJOR.$MINOR.$PATCH-rc.N`
|
||||
|
||||
- [ ] Post a release status update to Slack:
|
||||
|
||||
```sh
|
||||
pnpm run release release:status
|
||||
```
|
||||
|
||||
## Code Freeze
|
||||
|
||||
Create candidates as necessary
|
||||
|
||||
```shell
|
||||
pnpm run release release:create-candidate
|
||||
```
|
||||
|
||||
Monitor the release branch, and backports. Ensure the branch remains healthy.
|
||||
|
||||
## Release day ($RELEASE_DATE)
|
||||
|
||||
### Stage release
|
||||
|
||||
<!-- Keep in sync with patch_release_issue's "Stage release" section -->
|
||||
|
||||
On the day of the release, confirm there are no more release-blocking issues (as reported by the `release:status` command), then proceed with creating the final release:
|
||||
|
||||
- [ ] Bake constants and other static values into the release branch (and also update `main`). This requires that the release branch exists (should be automated above).
|
||||
```shell
|
||||
pnpm run release release:bake-content
|
||||
```
|
||||
- [ ] Merge the resulting pull requests for the content bake generated by the command above
|
||||
- [ ] Release a new version of src-cli, terraform-google-executors, aws-executors
|
||||
```shell
|
||||
pnpm run release release:create-tags
|
||||
```
|
||||
- [ ] Ensure the latest version of src-cli is available in all sources. You may need to run this command a few times in the background.
|
||||
```shell
|
||||
pnpm run release release:verify-releases
|
||||
```
|
||||
- [ ] Make another release candidate with the baked content
|
||||
- [ ] Make sure [CHANGELOG entries](https://github.com/sourcegraph/sourcegraph/blob/main/CHANGELOG.md) have been moved from **Unreleased** to **$MAJOR.$MINOR.$PATCH**, but excluding the ones that merged to `main` after the branch cut (whose changes are not in the `$MAJOR.$MINOR` branch).
|
||||
- [ ] Ensure security has approved the [Security release approval](https://github.com/sourcegraph/sourcegraph/issues?q=label%3Arelease-blocker+Security+approval+is%3Aopen) issue you created.
|
||||
- [ ] Make sure [deploy-sourcegraph-helm CHANGELOG entries](https://github.com/sourcegraph/deploy-sourcegraph-helm/blob/main/charts/sourcegraph/CHANGELOG.md) have been moved from **Unreleased** to **$MAJOR.$MINOR.$PATCH**, but excluding the ones that merged to `main` after the branch cut (whose changes are not in the `$MAJOR.$MINOR` branch).
|
||||
- [ ] Promote a release candidate to the final release build. You will need to provide the tag of the release candidate which you would like to promote as an argument. To get a list of available release candidates, you can use:
|
||||
|
||||
```shell
|
||||
pnpm run release release:check-candidate
|
||||
```
|
||||
|
||||
To promote the candidate, use the command:
|
||||
|
||||
```sh
|
||||
pnpm run release release:promote-candidate <tag>
|
||||
```
|
||||
|
||||
- [ ] Ensure that the following pipelines all pass for the `v$MAJOR.$MINOR.$PATCH` tag:
|
||||
- [ ] [Sourcegraph pipeline](https://buildkite.com/sourcegraph/sourcegraph/builds?branch=v$MAJOR.$MINOR.$PATCH)
|
||||
- [ ] Wait for the `v$MAJOR.$MINOR.$PATCH` release Docker images to be available in [Docker Hub](https://hub.docker.com/r/sourcegraph/server/tags)
|
||||
- [ ] Open PRs that publish the new release and address any action items required to finalize draft PRs (track PR status via the [generated release batch change](https://sourcegraph.sourcegraph.com/organizations/sourcegraph/batch-changes)):
|
||||
|
||||
```sh
|
||||
pnpm run release release:stage
|
||||
```
|
||||
|
||||
### Finalize release
|
||||
|
||||
- [ ] From the [release batch change](https://sourcegraph.sourcegraph.com/organizations/sourcegraph/batch-changes), merge the release-publishing PRs created previously.
|
||||
- For [deploy-sourcegraph](https://github.com/sourcegraph/deploy-sourcegraph)
|
||||
- [ ] Ensure the [release tag `v$MAJOR.$MINOR.$PATCH`](https://github.com/sourcegraph/deploy-sourcegraph/tags) has been created
|
||||
- For [deploy-sourcegraph-docker](https://github.com/sourcegraph/deploy-sourcegraph-docker)
|
||||
- [ ] Ensure the [release tag `v$MAJOR.$MINOR.$PATCH`](https://github.com/sourcegraph/deploy-sourcegraph-docker/tags) has been created
|
||||
- For [deploy-sourcegraph-helm](https://github.com/sourcegraph/deploy-sourcegraph-helm)
|
||||
- [ ] Cherry-pick the release-publishing PR from the release branch into `main`
|
||||
- [ ] Alert the marketing team in [#release-post](https://sourcegraph.slack.com/archives/C022Y5VUSBU) that they can merge the release post.
|
||||
- [ ] Announce that the release is live:
|
||||
```sh
|
||||
pnpm run release release:announce
|
||||
```
|
||||
- [ ] Disable the `release-protector` github action in sourcegraph/sourcegraph. This may require you to request admin permissions using Entitle.
|
||||
|
||||
### Post-release
|
||||
|
||||
- [ ] Create release calendar events, tracking issue, and announcement for next release (note: these commands will prompt for user input to generate the definition for the next release):
|
||||
```sh
|
||||
pnpm run release release:prepare
|
||||
pnpm run release tracking:issues
|
||||
pnpm run release tracking:timeline
|
||||
```
|
||||
- [ ] Close the release.
|
||||
```sh
|
||||
pnpm run release release:close
|
||||
```
|
||||
- [ ] Open a PR to update [`dev/release/release-config.jsonc`](https://sourcegraph.com/github.com/sourcegraph/sourcegraph/-/blob/dev/release/release-config.jsonc) with the auto-generated changes from above.
|
||||
|
||||
**Note:** If a patch release is requested after the release, ask that a [patch request issue](https://github.com/sourcegraph/sourcegraph/issues/new?assignees=&labels=team%2Fdistribution&template=request_patch_release.md&title=$MAJOR.$MINOR.1%3A+) be filled out and approved first.
|
||||
@ -1,24 +0,0 @@
|
||||
<!--
|
||||
DO NOT COPY THIS ISSUE TEMPLATE MANUALLY. Use `pnpm run release tracking:issues` in the `sourcegraph/sourcegraph` repository.
|
||||
|
||||
Arguments:
|
||||
- $MAJOR
|
||||
- $MINOR
|
||||
- $PATCH
|
||||
- $RELEASE_DATE
|
||||
- $ONE_WORKING_DAY_AFTER_RELEASE
|
||||
-->
|
||||
|
||||
# $MAJOR.$MINOR.$PATCH Security Review: Container Images
|
||||
|
||||
This is a Release Blocker issue to nudge the Release Captain to check
|
||||
with the Security Team whether there are any security patches that must be
|
||||
applied to Sourcegraph Container Images before the release is published.
|
||||
|
||||
See [RFC 678](https://docs.google.com/document/d/1v0TXVLPCNA42cQwIYeGLICoy6zfK9KprMRpMa2Fn6IE/edit#) for more information.
|
||||
|
||||
The Security Team should use **this** issue to communicate and coordinate
|
||||
actions to meet our obligations under the [Vulnerability Management Process](../../../../security/vulnerability-management-process.md).
|
||||
|
||||
After all items are resolved, this issue can be closed and the release should be
|
||||
considered unblocked by the Security Assessment.
|
||||
@ -1,22 +0,0 @@
|
||||
{
|
||||
"name": "release-sourcegraph-3.30.3",
|
||||
"description": "Track publishing of sourcegraph@3.30.3",
|
||||
"changes": [
|
||||
{
|
||||
"repository": "sourcegraph/sourcegraph",
|
||||
"pullRequestNumber": 23337
|
||||
},
|
||||
{
|
||||
"repository": "sourcegraph/about",
|
||||
"pullRequestNumber": 3942
|
||||
},
|
||||
{
|
||||
"repository": "sourcegraph/deploy-sourcegraph",
|
||||
"pullRequestNumber": 3524
|
||||
},
|
||||
{
|
||||
"repository": "sourcegraph/deploy-sourcegraph-docker",
|
||||
"pullRequestNumber": 482
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"rootDir": "src",
|
||||
"outDir": "out",
|
||||
},
|
||||
"include": ["./src/**/*", "./src/**/*.json"],
|
||||
"exclude": ["./node_modules", "../../node_modules", "out"],
|
||||
}
|
||||
@ -44,7 +44,6 @@
|
||||
"storybook:web": "pnpm --filter @sourcegraph/web run storybook",
|
||||
"storybook:wildcard": "pnpm --filter @sourcegraph/wildcard run storybook",
|
||||
"storybook:build": "pnpm --filter @sourcegraph/storybook run build",
|
||||
"release": "cd dev/release && pnpm run release",
|
||||
"docsite:serve": "./dev/docsite.sh -config doc/docsite.json serve -http=localhost:5080",
|
||||
"build-browser-extension": "pnpm --filter @sourcegraph/browser run build",
|
||||
"chromatic": "CHROMATIC=true pnpm run _chromatic --storybook-config-dir client/storybook/src --build-script-name=storybook:build",
|
||||
|
||||
@ -1695,15 +1695,6 @@ importers:
|
||||
specifier: workspace:*
|
||||
version: link:../testing
|
||||
|
||||
dev/release:
|
||||
dependencies:
|
||||
'@types/luxon':
|
||||
specifier: ^3.2.0
|
||||
version: 3.2.0
|
||||
luxon:
|
||||
specifier: ^3.2.1
|
||||
version: 3.2.1
|
||||
|
||||
schema: {}
|
||||
|
||||
packages:
|
||||
@ -11075,10 +11066,6 @@ packages:
|
||||
resolution: {integrity: sha512-69x+Dhrm2aShFkTqUuPgUXbKYwvq4FH/DVeeQH7MBfTjbKjPX51NGLERxVV1vf33N71dzLvXCko4OLqRvsq53Q==}
|
||||
dev: true
|
||||
|
||||
/@types/luxon@3.2.0:
|
||||
resolution: {integrity: sha512-lGmaGFoaXHuOLXFvuju2bfvZRqxAqkHPx9Y9IQdQABrinJJshJwfNCKV+u7rR3kJbiqfTF/NhOkcxxAFrObyaA==}
|
||||
dev: false
|
||||
|
||||
/@types/marked@4.0.3:
|
||||
resolution: {integrity: sha512-HnMWQkLJEf/PnxZIfbm0yGJRRZYYMhb++O9M36UCTA9z53uPvVoSlAwJr3XOpDEryb7Hwl1qAx/MV6YIW1RXxg==}
|
||||
dev: true
|
||||
@ -19396,11 +19383,6 @@ packages:
|
||||
resolution: {integrity: sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==}
|
||||
dev: true
|
||||
|
||||
/luxon@3.2.1:
|
||||
resolution: {integrity: sha512-QrwPArQCNLAKGO/C+ZIilgIuDnEnKx5QYODdDtbFaxzsbZcc/a7WFq7MhsVYgRlwawLtvOUESTlfJ+hc/USqPg==}
|
||||
engines: {node: '>=12'}
|
||||
dev: false
|
||||
|
||||
/lz-string@1.5.0:
|
||||
resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==}
|
||||
hasBin: true
|
||||
|
||||
@ -1,4 +1,3 @@
|
||||
packages:
|
||||
- 'client/*'
|
||||
- 'schema'
|
||||
- 'dev/release'
|
||||
|
||||
@ -19,7 +19,6 @@
|
||||
{ "path": "client/extension-api-types" },
|
||||
{ "path": "client/storybook" },
|
||||
{ "path": "client/testing" },
|
||||
{ "path": "dev/release" },
|
||||
{ "path": "schema" },
|
||||
{ "path": "client/codeintellify" },
|
||||
{ "path": "client/client-api" },
|
||||
|
||||
Loading…
Reference in New Issue
Block a user