diff --git a/.gitignore b/.gitignore index 3ea14660d79..4816466e6f9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +# Temporary directory for whatever you want +.tmp/ + # Vim *.swp diff --git a/dev/buildchecker/README.md b/dev/buildchecker/README.md index 21add73d940..534ad18e913 100644 --- a/dev/buildchecker/README.md +++ b/dev/buildchecker/README.md @@ -28,10 +28,26 @@ Also see the [`buildchecker` GitHub Action workflow](../../.github/workflows/bui Writes aggregated historical data, including the builds it finds, to a few files. ```sh -go run ./dev/buildchecker -buildkite.token=$BUILDKITE_TOKEN -failures.timeout=999 -created.from="2021-08-01" history +go run ./dev/buildchecker \ + -buildkite.token=$BUILDKITE_TOKEN \ + -failures.timeout=999 \ + -created.from="2021-08-01" \ + history ``` -To load builds from a file instead of fetching from Buildkite, use `-load-from="$FILE"`. +To load builds from a file instead of fetching from Buildkite, use `-builds.load-from="$FILE"`. + +You can also send metrics to Honeycomb with `-honeycomb.dataset` and `-honeycomb.token`: + +```sh +go run ./dev/buildchecker \ + -builds.load-from=".tmp/builds.json" \ + -failures.timeout=999 \ + -created.from="2021-08-01" \ + -honeycomb.dataset="buildkite-history" \ + -honeycomb.token=$HONEYCOMB_TOKEN \ + history +``` ## Tokens diff --git a/dev/buildchecker/history.go b/dev/buildchecker/history.go index c6011e937fc..6012fbb6a84 100644 --- a/dev/buildchecker/history.go +++ b/dev/buildchecker/history.go @@ -65,8 +65,10 @@ func generateHistory(builds []buildkite.Build, windowStart time.Time, opts Check return } +const dateFormat = "2006-01-02" + func buildDate(created time.Time) string { - return created.Format("2006/01/02") + return created.Format(dateFormat) } func mapToRecords(m map[string]int) (records [][]string) { @@ -75,20 +77,29 @@ func mapToRecords(m map[string]int) (records [][]string) { } // Sort by date ascending sort.Slice(records, func(i, j int) bool { - iDate, _ := 
time.Parse("2006/01/02", records[i][0]) - jDate, _ := time.Parse("2006/01/02", records[j][0]) + iDate, _ := time.Parse(dateFormat, records[i][0]) + jDate, _ := time.Parse(dateFormat, records[j][0]) return iDate.Before(jDate) }) - // TODO Fill in the gaps maybe? - // prev := records[0] - // for _, r := range records { - // rDate, _ := time.Parse("2006/01/02", r[0]) - // prevDate, _ := time.Parse("2006/01/02", prev[0]) - // if rDate.Sub(prevDate) > 24*time.Hour { - // records = append(a[:index+1], a[index:]...) - // a[index] = value - // } - // prev = r - // } + if len(records) <= 1 { + return + } + // Fill in the gaps + prev := records[0] + length := len(records) + for index := 0; index < length; index++ { + record := records[index] + recordDate, _ := time.Parse(dateFormat, record[0]) + prevDate, _ := time.Parse(dateFormat, prev[0]) + + for gapDate := prevDate.Add(24 * time.Hour); recordDate.Sub(gapDate) >= 24*time.Hour; gapDate = gapDate.Add(24 * time.Hour) { + insertRecord := []string{gapDate.Format(dateFormat), "0"} + records = append(records[:index], append([][]string{insertRecord}, records[index:]...)...) 
+ index += 1 + length += 1 + } + + prev = record + } return } diff --git a/dev/buildchecker/history_test.go b/dev/buildchecker/history_test.go index 8139dd2e9cf..0e7517db71f 100644 --- a/dev/buildchecker/history_test.go +++ b/dev/buildchecker/history_test.go @@ -10,7 +10,7 @@ import ( func TestGenerateHistory(t *testing.T) { day := time.Date(2006, 01, 02, 0, 0, 0, 0, time.UTC) - dayString := day.Format("2006/01/02") + dayString := day.Format("2006-01-02") tests := []struct { name string @@ -137,3 +137,46 @@ func TestGenerateHistory(t *testing.T) { }) } } + +func TestMapToRecords(t *testing.T) { + tests := []struct { + name string + arg map[string]int + wantRecords [][]string + }{{ + name: "sorted", + arg: map[string]int{ + "2022-01-02": 2, + "2022-01-01": 1, + "2022-01-03": 3, + }, + wantRecords: [][]string{ + {"2022-01-01", "1"}, + {"2022-01-02", "2"}, + {"2022-01-03", "3"}, + }, + }, { + name: "gaps filled in", + arg: map[string]int{ + "2022-01-01": 1, + "2022-01-03": 3, + "2022-01-06": 6, + "2022-01-07": 7, + }, + wantRecords: [][]string{ + {"2022-01-01", "1"}, + {"2022-01-02", "0"}, + {"2022-01-03", "3"}, + {"2022-01-04", "0"}, + {"2022-01-05", "0"}, + {"2022-01-06", "6"}, + {"2022-01-07", "7"}, + }, + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotRecords := mapToRecords(tt.arg) + assert.Equal(t, tt.wantRecords, gotRecords) + }) + } +} diff --git a/dev/buildchecker/main.go b/dev/buildchecker/main.go index 486068fd377..5176bfba4ac 100644 --- a/dev/buildchecker/main.go +++ b/dev/buildchecker/main.go @@ -8,11 +8,13 @@ import ( "fmt" "log" "os" + "path/filepath" "strings" "time" "github.com/buildkite/go-buildkite/v3/buildkite" "github.com/google/go-github/v41/github" + libhoney "github.com/honeycombio/libhoney-go" "github.com/slack-go/slack" "golang.org/x/oauth2" @@ -54,8 +56,11 @@ func main() { historyFlags := &cmdHistoryFlags{} flag.StringVar(&historyFlags.createdFromDate, "created.from", "", "date in YYYY-MM-DD format") 
flag.StringVar(&historyFlags.createdToDate, "created.to", "", "date in YYYY-MM-DD format") - flag.StringVar(&historyFlags.loadFrom, "load-from", "", "file to load builds from") - flag.StringVar(&historyFlags.writeTo, "write-to", "builds.json", "file to write builds to (unused if loading from file)") + flag.StringVar(&historyFlags.buildsLoadFrom, "builds.load-from", "", "file to load builds from - if unset, fetches from Buildkite") + flag.StringVar(&historyFlags.buildsWriteTo, "builds.write-to", ".tmp/builds.json", "file to write builds to (unused if loading from file)") + flag.StringVar(&historyFlags.resultsCsvPath, "csv", ".tmp/", "path for CSV results exports") + flag.StringVar(&historyFlags.honeycombDataset, "honeycomb.dataset", "", "honeycomb dataset to publish to") + flag.StringVar(&historyFlags.honeycombToken, "honeycomb.token", "", "honeycomb API token") flags.Parse() @@ -162,12 +167,16 @@ func cmdCheck(ctx context.Context, flags *Flags, checkFlags *cmdCheckFlags) { type cmdHistoryFlags struct { createdFromDate string createdToDate string - loadFrom string - writeTo string + + buildsLoadFrom string + buildsWriteTo string + + resultsCsvPath string + honeycombDataset string + honeycombToken string } func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags) { - // Time range var err error createdFrom := time.Now().Add(-24 * time.Hour) @@ -188,7 +197,8 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags // Get builds var builds []buildkite.Build - if historyFlags.loadFrom == "" { + if historyFlags.buildsLoadFrom == "" { + // Load builds from Buildkite if no cached builds configured log.Println("fetching builds from Buildkite") // Buildkite client @@ -201,8 +211,10 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags // Paginate results var nextPage = 1 var pages int + log.Printf("request paging progress:") for nextPage > 0 { pages++ + fmt.Printf(" %d", pages) // Newest is 
returned first https://buildkite.com/docs/apis/rest-api/builds#list-builds-for-a-pipeline pageBuilds, resp, err := bkc.Builds.ListByPipeline("sourcegraph", flags.Pipeline, &buildkite.BuildsListOptions{ @@ -211,10 +223,8 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags CreatedTo: createdTo, IncludeRetriedJobs: false, ListOptions: buildkite.ListOptions{ - Page: nextPage, - // Fix to high page size just in case, default is 30 - // https://buildkite.com/docs/apis/rest-api#pagination - PerPage: 99, + Page: nextPage, + PerPage: 50, }, }) if err != nil { @@ -224,20 +234,24 @@ builds = append(builds, pageBuilds...) nextPage = resp.NextPage } + fmt.Println() // end line for progress spinner - buildsJSON, err := json.Marshal(&builds) - if err != nil { - log.Fatal("json.Marshal(&builds): ", err) - } - if historyFlags.writeTo != "" { - if err := os.WriteFile(historyFlags.writeTo, buildsJSON, os.ModePerm); err != nil { + if historyFlags.buildsWriteTo != "" { + // Cache builds for ease of re-running analyses + log.Printf("Caching discovered builds in %s\n", historyFlags.buildsWriteTo) + buildsJSON, err := json.Marshal(&builds) + if err != nil { + log.Fatal("json.Marshal(&builds): ", err) + } + if err := os.WriteFile(historyFlags.buildsWriteTo, buildsJSON, os.ModePerm); err != nil { log.Fatal("os.WriteFile: ", err) } - log.Println("wrote to " + historyFlags.writeTo) + log.Println("wrote to " + historyFlags.buildsWriteTo) } } else { - log.Printf("loading builds from %s\n", historyFlags.loadFrom) - data, err := os.ReadFile(historyFlags.loadFrom) + // Load builds from configured path + log.Printf("loading builds from %s\n", historyFlags.buildsLoadFrom) + data, err := os.ReadFile(historyFlags.buildsLoadFrom) if err != nil { log.Fatal("os.ReadFile: ", err) } @@ -268,18 +282,63 @@ func cmdHistory(ctx context.Context, flags *Flags, historyFlags *cmdHistoryFlags
log.Printf("inferred %d builds as failed", inferredFail) // Generate history - totals, flakes, incidents := generateHistory(builds, createdTo, CheckOptions{ + checkOpts := CheckOptions{ FailuresThreshold: flags.FailuresThreshold, BuildTimeout: time.Duration(flags.FailuresTimeoutMins) * time.Minute, - }) + } + log.Printf("running analyses with options: %+v\n", checkOpts) + totals, flakes, incidents := generateHistory(builds, createdTo, checkOpts) - // Write to files - var errs error - errs = errors.CombineErrors(errs, writeCSV("./totals.csv", mapToRecords(totals))) - errs = errors.CombineErrors(errs, writeCSV("./flakes.csv", mapToRecords(flakes))) - errs = errors.CombineErrors(errs, writeCSV("./incidents.csv", mapToRecords(incidents))) - if errs != nil { - log.Fatal("csv.WriteAll: ", errs) + if historyFlags.resultsCsvPath != "" { + // Write to files + log.Printf("Writing CSV results to %s\n", historyFlags.resultsCsvPath) + var errs error + errs = errors.CombineErrors(errs, writeCSV(filepath.Join(historyFlags.resultsCsvPath, "totals.csv"), mapToRecords(totals))) + errs = errors.CombineErrors(errs, writeCSV(filepath.Join(historyFlags.resultsCsvPath, "flakes.csv"), mapToRecords(flakes))) + errs = errors.CombineErrors(errs, writeCSV(filepath.Join(historyFlags.resultsCsvPath, "incidents.csv"), mapToRecords(incidents))) + if errs != nil { + log.Fatal("csv.WriteAll: ", errs) + } + } + if historyFlags.honeycombDataset != "" { + // Send to honeycomb + log.Printf("Sending results to honeycomb dataset %q\n", historyFlags.honeycombDataset) + hc, err := libhoney.NewClient(libhoney.ClientConfig{ + Dataset: historyFlags.honeycombDataset, + APIKey: historyFlags.honeycombToken, + }) + if err != nil { + log.Fatal("libhoney.NewClient: ", err) + } + var events []*libhoney.Event + for _, record := range mapToRecords(totals) { + recordDateString := record[0] + ev := hc.NewEvent() + ev.Timestamp, _ = time.Parse(dateFormat, recordDateString) + ev.AddField("build_count", 
totals[recordDateString]) // date:count + ev.AddField("incident_minutes", incidents[recordDateString]) // date:minutes + ev.AddField("flake_count", flakes[recordDateString]) // date:count + events = append(events, ev) + } + + // send all at once + log.Printf("Sending %d events to Honeycomb\n", len(events)) + var errs error + for _, ev := range events { + if err := ev.Send(); err != nil { + errs = errors.Append(errs, err) + } + } + hc.Close() + if errs != nil { + log.Fatal("honeycomb.Send: ", errs) + } + // log events that do not send + for _, ev := range events { + if strings.Contains(ev.String(), "sent:false") { + log.Printf("An event did not send: %s", ev.String()) + } + } } log.Println("done!")