dev/buildchecker: add ability to export history to honeycomb (#31360)

Robert Lin, 2022-02-24 14:50:35 -08:00, committed by GitHub
commit fadc40dd1f (parent dd90774a29)
5 changed files with 177 additions and 45 deletions

.gitignore
View File

@ -1,3 +1,6 @@
# Temporary directory for whatever you want
.tmp/
# Vim
*.swp

View File

@ -28,10 +28,26 @@ Also see the [`buildchecker` GitHub Action workflow](../../.github/workflows/bui
Aggregates historical build data and writes the results (build totals, flake counts, and incident durations) to CSV files, along with a cache of the builds it finds.
```sh
go run ./dev/buildchecker -buildkite.token=$BUILDKITE_TOKEN -failures.timeout=999 -created.from="2021-08-01" history
go run ./dev/buildchecker \
-buildkite.token=$BUILDKITE_TOKEN \
-failures.timeout=999 \
-created.from="2021-08-01" \
history
```
To load builds from a file instead of fetching from Buildkite, use `-load-from="$FILE"`.
To load builds from a file instead of fetching from Buildkite, use `-builds.load-from="$FILE"`.
You can also send metrics to Honeycomb with `-honeycomb.dataset` and `-honeycomb.token`:
```sh
go run ./dev/buildchecker \
-builds.load-from=".tmp/builds.json" \
-failures.timeout=999 \
-created.from="2021-08-01" \
-honeycomb.dataset="buildkite-history" \
-honeycomb.token=$HONEYCOMB_TOKEN \
history
```
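Each aggregate is exported as rows of `date,value` to `totals.csv`, `flakes.csv`, and `incidents.csv` under the `-csv` path (`.tmp/` by default), next to the cached `builds.json`. A quick way to sanity-check a run, assuming the default paths (the rows shown below are hypothetical):
```sh
ls .tmp/
# builds.json  flakes.csv  incidents.csv  totals.csv
head -n 3 .tmp/totals.csv
# 2021-08-01,12
# 2021-08-02,0
# 2021-08-03,27
```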
## Tokens

View File

@ -65,8 +65,10 @@ func generateHistory(builds []buildkite.Build, windowStart time.Time, opts Check
return
}
const dateFormat = "2006-01-02"
func buildDate(created time.Time) string {
return created.Format("2006/01/02")
return created.Format(dateFormat)
}
func mapToRecords(m map[string]int) (records [][]string) {
@ -75,20 +77,29 @@ func mapToRecords(m map[string]int) (records [][]string) {
}
// Sort by date ascending
sort.Slice(records, func(i, j int) bool {
iDate, _ := time.Parse("2006/01/02", records[i][0])
jDate, _ := time.Parse("2006/01/02", records[j][0])
iDate, _ := time.Parse(dateFormat, records[i][0])
jDate, _ := time.Parse(dateFormat, records[j][0])
return iDate.Before(jDate)
})
// TODO Fill in the gaps maybe?
// prev := records[0]
// for _, r := range records {
// rDate, _ := time.Parse("2006/01/02", r[0])
// prevDate, _ := time.Parse("2006/01/02", prev[0])
// if rDate.Sub(prevDate) > 24*time.Hour {
// records = append(a[:index+1], a[index:]...)
// a[index] = value
// }
// prev = r
// }
if len(records) <= 1 {
return
}
// Fill in the gaps: insert a zero-valued record for each missing day between consecutive dates
prev := records[0]
length := len(records)
for index := 0; index < length; index++ {
record := records[index]
recordDate, _ := time.Parse(dateFormat, record[0])
prevDate, _ := time.Parse(dateFormat, prev[0])
for gapDate := prevDate.Add(24 * time.Hour); recordDate.Sub(gapDate) >= 24*time.Hour; gapDate = gapDate.Add(24 * time.Hour) {
insertRecord := []string{gapDate.Format(dateFormat), "0"}
records = append(records[:index], append([][]string{insertRecord}, records[index:]...)...)
index += 1
length += 1
}
prev = record
}
return
}

View File

@ -10,7 +10,7 @@ import (
func TestGenerateHistory(t *testing.T) {
day := time.Date(2006, 01, 02, 0, 0, 0, 0, time.UTC)
dayString := day.Format("2006/01/02")
dayString := day.Format("2006-01-02")
tests := []struct {
name string
@ -137,3 +137,46 @@ func TestGenerateHistory(t *testing.T) {
})
}
}
func TestMapToRecords(t *testing.T) {
tests := []struct {
name string
arg map[string]int
wantRecords [][]string
}{{
name: "sorted",
arg: map[string]int{
"2022-01-02": 2,
"2022-01-01": 1,
"2022-01-03": 3,
},
wantRecords: [][]string{
{"2022-01-01", "1"},
{"2022-01-02", "2"},
{"2022-01-03", "3"},
},
}, {
name: "gaps filled in",
arg: map[string]int{
"2022-01-01": 1,
"2022-01-03": 3,
"2022-01-06": 6,
"2022-01-07": 7,
},
wantRecords: [][]string{
{"2022-01-01", "1"},
{"2022-01-02", "0"},
{"2022-01-03", "3"},
{"2022-01-04", "0"},
{"2022-01-05", "0"},
{"2022-01-06", "6"},
{"2022-01-07", "7"},
},
}}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotRecords := mapToRecords(tt.arg)
assert.Equal(t, tt.wantRecords, gotRecords)
})
}
}

View File

@ -8,11 +8,13 @@ import (
"fmt"
"log"
"os"
"path/filepath"
"strings"
"time"
"github.com/buildkite/go-buildkite/v3/buildkite"
"github.com/google/go-github/v41/github"
libhoney "github.com/honeycombio/libhoney-go"
"github.com/slack-go/slack"
"golang.org/x/oauth2"
@ -54,8 +56,11 @@ func main() {
historyFlags := &cmdHistoryFlags{}
flag.StringVar(&historyFlags.createdFromDate, "created.from", "", "date in YYYY-MM-DD format")
flag.StringVar(&historyFlags.createdToDate, "created.to", "", "date in YYYY-MM-DD format")
flag.StringVar(&historyFlags.loadFrom, "load-from", "", "file to load builds from")
flag.StringVar(&historyFlags.writeTo, "write-to", "builds.json", "file to write builds to (unused if loading from file)")
flag.StringVar(&historyFlags.buildsLoadFrom, "builds.load-from", "", "file to load builds from - if unset, fetches from Buildkite")
flag.StringVar(&historyFlags.buildsWriteTo, "builds.write-to", ".tmp/builds.json", "file to write builds to (unused if loading from file)")
flag.StringVar(&historyFlags.resultsCsvPath, "csv", ".tmp/", "path for CSV results exports")
flag.StringVar(&historyFlags.honeycombDataset, "honeycomb.dataset", "", "honeycomb dataset to publish to")
flag.StringVar(&historyFlags.honeycombToken, "honeycomb.token", "", "honeycomb API token")
flags.Parse()
@ -162,12 +167,16 @@ func cmdCheck(ctx context.Context, flags *Flags, checkFlags *cmdCheckFlags) {
type cmdHistoryFlags struct {
createdFromDate string
createdToDate string
loadFrom string
writeTo string
buildsLoadFrom string
buildsWriteTo string
resultsCsvPath string
honeycombDataset string
honeycombToken string
}
func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags) {
// Time range
var err error
createdFrom := time.Now().Add(-24 * time.Hour)
@ -188,7 +197,8 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags
// Get builds
var builds []buildkite.Build
if historyFlags.loadFrom == "" {
if historyFlags.buildsLoadFrom == "" {
// Load builds from Buildkite if no cached builds configured
log.Println("fetching builds from Buildkite")
// Buildkite client
@ -201,8 +211,10 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags
// Paginate results
var nextPage = 1
var pages int
log.Printf("request paging progress:")
for nextPage > 0 {
pages++
fmt.Printf(" %d", pages)
// Newest is returned first https://buildkite.com/docs/apis/rest-api/builds#list-builds-for-a-pipeline
pageBuilds, resp, err := bkc.Builds.ListByPipeline("sourcegraph", flags.Pipeline, &buildkite.BuildsListOptions{
@ -211,10 +223,8 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags
CreatedTo: createdTo,
IncludeRetriedJobs: false,
ListOptions: buildkite.ListOptions{
Page: nextPage,
// Fix to high page size just in case, default is 30
// https://buildkite.com/docs/apis/rest-api#pagination
PerPage: 99,
Page: nextPage,
PerPage: 50,
},
})
if err != nil {
@ -224,20 +234,24 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags
builds = append(builds, pageBuilds...)
nextPage = resp.NextPage
}
fmt.Println() // end the paging progress output line
buildsJSON, err := json.Marshal(&builds)
if err != nil {
log.Fatal("json.Marshal(&builds): ", err)
}
if historyFlags.writeTo != "" {
if err := os.WriteFile(historyFlags.writeTo, buildsJSON, os.ModePerm); err != nil {
if historyFlags.buildsWriteTo != "" {
// Cache builds for ease of re-running analyses
log.Printf("Caching discovered builts in %s\n", historyFlags.buildsWriteTo)
buildsJSON, err := json.Marshal(&builds)
if err != nil {
log.Fatal("json.Marshal(&builds): ", err)
}
if err := os.WriteFile(historyFlags.buildsWriteTo, buildsJSON, os.ModePerm); err != nil {
log.Fatal("os.WriteFile: ", err)
}
log.Println("wrote to " + historyFlags.writeTo)
log.Println("wrote to " + historyFlags.buildsWriteTo)
}
} else {
log.Printf("loading builds from %s\n", historyFlags.loadFrom)
data, err := os.ReadFile(historyFlags.loadFrom)
// Load builds from configured path
log.Printf("loading builds from %s\n", historyFlags.buildsLoadFrom)
data, err := os.ReadFile(historyFlags.buildsLoadFrom)
if err != nil {
log.Fatal("os.ReadFile: ", err)
}
@ -268,18 +282,63 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags
log.Printf("inferred %d builds as failed", inferredFail)
// Generate history
totals, flakes, incidents := generateHistory(builds, createdTo, CheckOptions{
checkOpts := CheckOptions{
FailuresThreshold: flags.FailuresThreshold,
BuildTimeout: time.Duration(flags.FailuresTimeoutMins) * time.Minute,
})
}
log.Printf("running analyses with options: %+v\n", checkOpts)
totals, flakes, incidents := generateHistory(builds, createdTo, checkOpts)
// Write to files
var errs error
errs = errors.CombineErrors(errs, writeCSV("./totals.csv", mapToRecords(totals)))
errs = errors.CombineErrors(errs, writeCSV("./flakes.csv", mapToRecords(flakes)))
errs = errors.CombineErrors(errs, writeCSV("./incidents.csv", mapToRecords(incidents)))
if errs != nil {
log.Fatal("csv.WriteAll: ", errs)
if historyFlags.resultsCsvPath != "" {
// Write to files
log.Printf("Writing CSV results to %s\n", historyFlags.resultsCsvPath)
var errs error
errs = errors.CombineErrors(errs, writeCSV(filepath.Join(historyFlags.resultsCsvPath, "totals.csv"), mapToRecords(totals)))
errs = errors.CombineErrors(errs, writeCSV(filepath.Join(historyFlags.resultsCsvPath, "flakes.csv"), mapToRecords(flakes)))
errs = errors.CombineErrors(errs, writeCSV(filepath.Join(historyFlags.resultsCsvPath, "incidents.csv"), mapToRecords(incidents)))
if errs != nil {
log.Fatal("csv.WriteAll: ", errs)
}
}
if historyFlags.honeycombDataset != "" {
// Send to honeycomb
log.Printf("Sending results to honeycomb dataset %q\n", historyFlags.honeycombDataset)
hc, err := libhoney.NewClient(libhoney.ClientConfig{
Dataset: historyFlags.honeycombDataset,
APIKey: historyFlags.honeycombToken,
})
if err != nil {
log.Fatal("libhoney.NewClient: ", err)
}
var events []*libhoney.Event
for _, record := range mapToRecords(totals) {
recordDateString := record[0]
ev := hc.NewEvent()
ev.Timestamp, _ = time.Parse(dateFormat, recordDateString)
ev.AddField("build_count", totals[recordDateString]) // date:count
ev.AddField("incident_minutes", incidents[recordDateString]) // date:minutes
ev.AddField("flake_count", flakes[recordDateString]) // date:count
events = append(events, ev)
}
// send all at once
log.Printf("Sending %d events to Honeycomb\n", len(events))
var errs error
for _, ev := range events {
if err := ev.Send(); err != nil {
errs = errors.Append(errs, err)
}
}
hc.Close()
if errs != nil {
log.Fatal("honeycomb.Send: ", errs)
}
// log events that did not send
for _, ev := range events {
if strings.Contains(ev.String(), "sent:false") {
log.Printf("An event did not send: %s", ev.String())
}
}
}
log.Println("done!")