codeintel: re-enable qa with audit logs dumping (#38554)

Noah S-C 2022-07-11 17:44:16 -07:00 committed by GitHub
parent 4193d693e7
commit 979148a999
5 changed files with 102 additions and 15 deletions

View File

@@ -31,8 +31,6 @@ func init() {
 }
 
 func main() {
-	// FIXME: Flaky test in CI. Re-enable https://github.com/sourcegraph/sourcegraph/issues/37671
-	return
 	if err := flag.CommandLine.Parse(os.Args[1:]); err != nil {
		fmt.Fprintf(os.Stderr, "error: %s\n", err)
		os.Exit(1)

View File

@@ -36,8 +36,6 @@ func init() {
 }
 
 func main() {
-	// FIXME: Flaky test in CI. Re-enable https://github.com/sourcegraph/sourcegraph/issues/37671
-	return
 	if err := flag.CommandLine.Parse(os.Args[1:]); err != nil {
		fmt.Fprintf(os.Stderr, "error: %s\n", err)
		os.Exit(1)

View File

@@ -31,8 +31,6 @@ func init() {
 }
 
 func main() {
-	// FIXME: Flaky test in CI. Re-enable https://github.com/sourcegraph/sourcegraph/issues/37671
-	return
 	if err := flag.CommandLine.Parse(os.Args[1:]); err != nil {
		fmt.Fprintf(os.Stderr, "error: %s\n", err)
		os.Exit(1)

View File

@@ -74,7 +74,29 @@ func monitor(ctx context.Context, repoNames []string, uploads []uploadMeta) error {
 				fmt.Printf("[%5s] %s Finished processing index for %s@%s\n", internal.TimeSince(start), internal.EmojiSuccess, repoName, uploadState.upload.commit[:7])
 			}
 		} else if uploadState.state != "QUEUED" && uploadState.state != "PROCESSING" {
-			return errors.Newf("unexpected state '%s' for %s@%s", uploadState.state, uploadState.upload.repoName, uploadState.upload.commit[:7])
+			var payload struct {
+				Data struct {
+					LsifUploads struct {
+						Nodes []struct {
+							ID        string
+							AuditLogs auditLogs
+						}
+					}
+				}
+			}
+			if err := internal.GraphQLClient().GraphQL(internal.SourcegraphAccessToken, uploadsQueryFragment, nil, &payload); err != nil {
+				return errors.Newf("unexpected state '%s' for %s@%s\nAudit Logs:\n%s", uploadState.state, uploadState.upload.repoName, uploadState.upload.commit[:7], errors.Wrap(err, "error getting audit logs"))
+			}
+
+			var logs auditLogs
+			for _, upload := range payload.Data.LsifUploads.Nodes {
+				if upload.ID == uploadState.upload.id {
+					logs = upload.AuditLogs
+				}
+			}
+
+			return errors.Newf("unexpected state '%s' for %s@%s\nAudit Logs:\n%s", uploadState.state, uploadState.upload.repoName, uploadState.upload.commit[:7], logs)
 		}
 	}
@@ -155,14 +177,18 @@ func queryRepoState(_ context.Context, repoNames []string, uploads []uploadMeta)
 			index, _ := strconv.Atoi(name[1:])
 			upload := uploads[index]
-			state[upload.repoName] = repoState{
-				stale:        state[upload.repoName].stale,
-				uploadStates: append(state[upload.repoName].uploadStates, uploadState{
-					upload:  upload,
-					state:   data.State,
-					failure: data.Failure,
-				}),
+			uState := uploadState{
+				upload:  upload,
+				state:   data.State,
+				failure: data.Failure,
+			}
+
+			repoState := repoState{
+				stale:        state[upload.repoName].stale,
+				uploadStates: append(state[upload.repoName].uploadStates, uState),
 			}
+
+			state[upload.repoName] = repoState
 		}
 	}
@@ -199,6 +225,26 @@ const uploadQueryFragment = `
 	}
 `
 
+const uploadsQueryFragment = `
+	query CodeIntelQA_UploadsList {
+		lsifUploads(includeDeleted: true) {
+			nodes {
+				id
+				auditLogs {
+					logTimestamp
+					reason
+					changedColumns {
+						column
+						old
+						new
+					}
+					operation
+				}
+			}
+		}
+	}
+`
+
 type jsonUploadResult struct {
 	State   string `json:"state"`
 	Failure string `json:"failure"`
@@ -208,3 +254,51 @@ type jsonUploadResult struct {
 type jsonCommitGraphResult struct {
 	Stale bool `json:"stale"`
 }
+
+type auditLogs []auditLog
+
+type auditLog struct {
+	LogTimestamp   time.Time `json:"logTimestamp"`
+	Reason         *string   `json:"reason"`
+	Operation      string    `json:"operation"`
+	ChangedColumns []struct {
+		Old    *string `json:"old"`
+		New    *string `json:"new"`
+		Column string  `json:"column"`
+	} `json:"changedColumns"`
+}
+
+func (a auditLogs) String() string {
+	var s strings.Builder
+	for _, log := range a {
+		s.WriteString("Time: ")
+		s.WriteString(log.LogTimestamp.String())
+		s.Write([]byte("\n\t"))
+		s.WriteString("Operation: ")
+		s.WriteString(log.Operation)
+		if log.Reason != nil && *log.Reason != "" {
+			s.Write([]byte("\n\t"))
+			s.WriteString("Reason: ")
+			s.WriteString(*log.Reason)
+		}
+		s.Write([]byte("\n\t\t"))
+		for i, change := range log.ChangedColumns {
+			s.WriteString(fmt.Sprintf("Column: '%s', Old: '%s', New: '%s'", change.Column, ptrPrint(change.Old), ptrPrint(change.New)))
+			if i < len(log.ChangedColumns)-1 {
+				s.Write([]byte("\n\t\t"))
+			}
+		}
+		s.WriteRune('\n')
+	}
+	return s.String()
+}
+
+func ptrPrint(s *string) string {
+	if s == nil {
+		return "NULL"
+	}
+	return *s
+}
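
For reference, the sketch below is not part of this commit: it decodes a hand-written response of the shape CodeIntelQA_UploadsList returns into the same anonymous struct monitor uses, then prints it with the String method added above. The upload ID, timestamp, and column values are made up, and the snippet assumes the auditLogs, auditLog, and ptrPrint declarations above are copied into the same scratch package before running it.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hand-written response matching the shape of CodeIntelQA_UploadsList;
	// every value is illustrative.
	raw := []byte(`{"data": {"lsifUploads": {"nodes": [{
		"id": "TFNJRlVwbG9hZDoxMjM=",
		"auditLogs": [{
			"logTimestamp": "2022-07-11T17:44:16Z",
			"reason": null,
			"operation": "MODIFY",
			"changedColumns": [{"column": "state", "old": "processing", "new": "failed"}]
		}]
	}]}}}`)

	// Same anonymous struct monitor uses; encoding/json matches the exported
	// field names case-insensitively, and the auditLog json tags handle the rest.
	var payload struct {
		Data struct {
			LsifUploads struct {
				Nodes []struct {
					ID        string
					AuditLogs auditLogs
				}
			}
		}
	}
	if err := json.Unmarshal(raw, &payload); err != nil {
		panic(err)
	}

	fmt.Print(payload.Data.LsifUploads.Nodes[0].AuditLogs)
}

With that input, String renders one block per log entry: a "Time:" line, then tab-indented "Operation: MODIFY" and "Column: 'state', Old: 'processing', New: 'failed'" lines.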

View File

@@ -557,7 +557,6 @@ func triggerReleaseBranchHealthchecks(minimumUpgradeableVersion string) operations.Operation {
 func codeIntelQA(candidateTag string) operations.Operation {
 	return func(p *bk.Pipeline) {
 		p.AddStep(":docker::brain: Code Intel QA",
-			bk.Skip("Disabled because flaky"),
 			// Run tests against the candidate server image
 			bk.DependsOn(candidateImageStepKey("server")),
			bk.Env("CANDIDATE_VERSION", candidateTag),